forked from github/docs
-
Notifications
You must be signed in to change notification settings - Fork 0
/
check-s3-images.js
executable file
·151 lines (114 loc) · 4.87 KB
/
check-s3-images.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
#!/usr/bin/env node
const fs = require('fs')
const path = require('path')
const { chain, difference } = require('lodash')
const loadPages = require('../lib/pages')
const loadSiteData = require('../lib/site-data')
const renderContent = require('../lib/render-content')
const enterpriseServerReleases = require('../lib/enterprise-server-releases').supported
const patterns = require('../lib/patterns')
const authenticateToAWS = require('../lib/authenticate-to-aws.js')
const readlineSync = require('readline-sync')
const { execSync } = require('child_process')
const uploadScript = path.join(process.cwd(), 'script/upload-enterprise-images-to-s3.js')
// [start-readme]
//
// Run this script in your branch to check whether any images referenced in Enterprise content are
// not in the expected S3 bucket. You will need to authenticate to S3 via `awssume` to use this script.
// Instructions for the one-time setup are [here](https://github.com/github/product-documentation/blob/master/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md).
//
// [end-readme]
// Kick off the check; main() re-invokes itself after attempting uploads,
// so a successful run ends with a second pass confirming the bucket state.
main()
// Compare every image referenced in Enterprise content against the keys in the
// github-images S3 bucket, then offer to upload any that are missing.
async function main () {
  const s3 = await authenticateToAWS()
  console.log('Working...\n')
  const pages = await getEnglishPages()
  const siteData = await getEnglishSiteData()

  // Build the expected S3 key ("enterprise/<version><image path>") for every
  // image referenced in any supported Enterprise version of every page.
  const s3References = []
  for (const version of enterpriseServerReleases) {
    for (const page of pages) {
      // skip page if it doesn't have a permalink for the current product version
      if (!page.permalinks.some(permalink => permalink.pageVersion === version)) continue
      // skip index pages because they do not contain images
      if (page.relativePath.endsWith('index.md')) continue
      // build fake context object for rendering the page
      page.version = version
      const context = {
        page,
        site: siteData
      }
      const rendered = await renderContent(page.markdown, context)
      const imageReferences = rendered.match(patterns.imagePath)
      if (!imageReferences) continue
      imageReferences.forEach(ref => {
        s3References.push(`enterprise/${version}${ref}`)
      })
    }
  }

  // store all images referenced in Enterprise content
  const s3ReferencesToCheck = chain(s3References).uniq().sort().value()
  console.log(`Found ${s3ReferencesToCheck.length} images referenced in Enterprise content in the current checkout.\n`)
  console.log('Checking the github-images S3 bucket...\n')

  // listObjects accumulates matching keys into the array we pass it AND
  // returns that same array; pushing its return value (as the old code did)
  // inserted imagesOnS3 into itself, so flatten() double-counted every key.
  // Call it for its side effect only.
  const imagesOnS3 = []
  for (const version of enterpriseServerReleases) {
    const versionDirectory = `enterprise/${version}`
    await listObjects(s3, versionDirectory, imagesOnS3)
  }

  // store all found images on s3
  const allImagesOnS3 = chain(imagesOnS3).flatten().uniq().sort().value()
  const imagesMissingFromS3 = difference(s3ReferencesToCheck, allImagesOnS3)

  // return early if there are no missing images
  if (!imagesMissingFromS3.length) {
    console.log('All images are in S3 that should be!')
    return
  }

  console.log(`${imagesMissingFromS3.length} images are missing from S3:\n\n${imagesMissingFromS3.join('\n')}`)

  const prompt = `\nDo you want to try to upload these images to S3 from your local checkout?
\nPress Y to continue, or press any other key to cancel: `
  const answer = readlineSync.question(prompt)
  if (!answer.match(/^Y$/mi)) {
    console.log('Exiting!')
    process.exit()
  }

  console.log('Trying to upload...\n')
  imagesMissingFromS3.forEach(missingImage => {
    // s3 path: enterprise/2.19/assets/images/developer/graphql/insomnia-base-url-and-pat.png
    // local path: assets/images/developer/graphql/insomnia-base-url-and-pat.png
    const version = missingImage.split('/')[1]
    const localPath = missingImage.replace(`enterprise/${version}/`, '')
    const fullPath = path.join(process.cwd(), localPath)
    if (!fs.existsSync(fullPath)) {
      console.log(`cannot upload ${localPath}, file not found`)
      return
    }
    // Quote the script and image paths so the shell command survives
    // directories or filenames that contain spaces.
    const result = execSync(`"${uploadScript}" --core --single "${localPath}" ${version}`)
    console.log(result.toString())
  })

  console.log('Done uploading! Checking S3 again.')
  // Re-run the whole check so the user sees whether the uploads succeeded.
  main()
}
// Load all site pages and keep only the English ones.
async function getEnglishPages () {
  const allPages = await loadPages()
  return allPages.filter(({ languageCode }) => languageCode === 'en')
}
// Load site data and return only the English `site` subtree.
async function getEnglishSiteData () {
  const allSiteData = await loadSiteData()
  return allSiteData.en.site
}
// List keys in the github-images bucket under `versionDirectory`, pushing each
// page of matching keys (as an array) into the caller-supplied `imagesOnS3`
// accumulator. Recurses with a continuation token while results are truncated.
// Returns `imagesOnS3`, or an empty array when a page yields no matching keys.
async function listObjects (s3, versionDirectory, imagesOnS3, token) {
  const request = {
    Bucket: 'github-images',
    StartAfter: versionDirectory,
    // only include a continuation token when we are paginating
    ...(token ? { ContinuationToken: token } : {})
  }
  const data = await s3.listObjectsV2(request).promise()
  // StartAfter positions the listing lexicographically; filter down to keys
  // actually inside the version directory.
  const keysInDirectory = data.Contents
    .map(({ Key }) => Key)
    .filter(key => key.startsWith(versionDirectory))
  if (keysInDirectory.length === 0) return []
  imagesOnS3.push(keysInDirectory)
  if (data.IsTruncated) {
    await listObjects(s3, versionDirectory, imagesOnS3, data.NextContinuationToken)
  }
  return imagesOnS3
}