This repository has been archived by the owner on Nov 20, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 2
/
actions-generate-docs.py
326 lines (275 loc) · 11.7 KB
/
actions-generate-docs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
# Copyright 2019 Autodesk, Inc. All rights reserved.
#
# Use of this software is subject to the terms of the Autodesk license agreement
# provided at the time of installation or download, or which otherwise accompanies
# this software in either electronic or hard copy form.
#
import logging
import mimetypes
import os
import posixpath
import shutil
import subprocess
import sys

import boto3
import yaml
# set up logging channel for this script
log = logging.getLogger(__name__)
def upload_folder_to_s3(s3_bucket, s3_client, src, dst):
    """
    Upload folder to S3, recursively.

    :param str s3_bucket: Bucket to upload to
    :param s3_client: boto3 s3 client object
    :param str src: Source path on the local filesystem
    :param str dst: S3 destination key prefix
    """
    names = os.listdir(src)
    for name in names:
        srcname = os.path.join(src, name)
        # S3 object keys always use forward slashes, independent of the
        # local OS path separator, so build keys with posixpath rather
        # than os.path (which would produce backslashes on Windows).
        dstname = posixpath.join(dst, name)
        if os.path.isdir(srcname):
            upload_folder_to_s3(s3_bucket, s3_client, srcname, dstname)
        else:
            log.info("S3 upload: '{}' -> '{}'".format(srcname, dstname))
            # auto detect mime type, falling back to generic binary
            (mime_type, _) = mimetypes.guess_type(srcname)
            if mime_type is None:
                mime_type = "application/octet-stream"
            # upload
            with open(srcname, "rb") as file_handle:
                s3_client.put_object(
                    Bucket=s3_bucket,
                    ContentType=mime_type,
                    Key=dstname,
                    Body=file_handle
                )
def execute_external_command(cmd):
    """
    Executes the given command line,
    logs output and raises on failure.

    :param str cmd: Command to execute (interpreted by the shell; callers
        must only pass trusted input since shell=True is used)
    :returns: The combined stdout/stderr output generated
    :raises: SubprocessError on failure
    """
    log.info("Executing command '{}'".format(cmd))
    # Capture the child's output: without stdout=/stderr= pipes,
    # communicate() returns (None, None) and no output is ever collected.
    p = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    stdout, stderr = p.communicate()
    output = "{}\n{}".format(stdout, stderr)
    # log the output even when the command failed, to help debugging
    log.info(output)
    if p.returncode != 0:
        err_msg = f"Failed to execute external command: {cmd}"
        log.error(err_msg)
        raise subprocess.SubprocessError(err_msg)
    return output
def generate_pull_request_comment(doc_url):
    """
    Generates a comment pointing at a web url in the current PR.

    Reads TK_GITHUB_TOKEN, GITHUB_REPOSITORY and PR_NUMBER from the
    environment; when no token is defined, logs an error and posts nothing.

    :param doc_url: url to link to
    """
    if "TK_GITHUB_TOKEN" not in os.environ:
        log.error("Cannot add comment to pull request with link "
                  "to docs - no TK_GITHUB_TOKEN env var defined.")
        return

    log.info("Adding PR comment with link to generated documentation...")
    # assemble the curl invocation piecewise: auth header, JSON body,
    # then the issue-comments endpoint for this PR
    auth_part = "curl -H 'Authorization: token {token}' -X POST ".format(
        token=os.environ["TK_GITHUB_TOKEN"]
    )
    body_part = "-d '{\"body\": \"[Documentation Preview](%s)\"}' " % (
        doc_url,
    )
    endpoint_part = "'https://api.github.com/repos/{repo_slug}/issues/{pull_request}/comments'".format(  # noqa
        repo_slug=os.environ["GITHUB_REPOSITORY"],
        pull_request=os.environ["PR_NUMBER"]
    )
    execute_external_command(auth_part + body_part + endpoint_part)
def parse_jekyll_configs(config_paths):
    """
    Parse jekyll config files in order of precedence from low to high,
    producing a config dictionary containing all found key/values.

    Later paths in the list override keys set by earlier ones; paths
    that do not exist are silently skipped.

    :param list config_paths: The list of jekyll config paths to check / parse
    :returns: The resulting jekyll config dictionary.
    """
    output_config = {}
    for config_path in config_paths:
        if os.path.exists(config_path):
            with open(config_path, 'r') as config_file:
                config = yaml.safe_load(config_file)
            # safe_load returns None for an empty file; guard against
            # dict.update(None) raising a TypeError.
            if config:
                output_config.update(config)
    return output_config
def copy_image_tree(source_dir, target_dir, overwrite=False):
    """
    Copy an image tree from an i18n source to the language target.

    :param str source_dir: The source for translated images
    :param str target_dir: The directory to copy the images to
    :param bool overwrite: Whether existing images in the target directory
        should be overwritten.
    """
    log.info("Copying image tree from {} to {}...".format(source_dir, target_dir))
    log.info("(existing files will {}be overwritten.)".format("" if overwrite else "not "))

    for walk_dir, _, file_names in os.walk(source_dir):
        # mirror this subdirectory under the target root
        dest_dir = os.path.join(target_dir, os.path.relpath(walk_dir, source_dir))
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        for file_name in file_names:
            src_file = os.path.join(walk_dir, file_name)
            dst_file = os.path.join(dest_dir, file_name)
            if os.path.exists(dst_file):
                if not overwrite:
                    # keep the existing file untouched
                    continue
                if os.path.samefile(src_file, dst_file):
                    # same physical file -- nothing to do
                    continue
                log.info("{} already exists, removing and overwriting...".format(dst_file))
                os.remove(dst_file)
            log.info("Copying {} to {}...".format(src_file, dst_file))
            shutil.copy(src_file, dst_file)
def cleanup_image_i18n(config_paths, build_dir):
    """
    Iterates over i18n targets found in the jekyll config, and cleans up
    duplicate / miscopied images from build. If no i18n image is found, the
    default language image is copied in its place.

    :param list config_paths: The paths to the jekyll configs to read i18n
        targets and default language from.
    :param str build_dir: The directory that the jekyll site was built to.
    :raises KeyError: if the config defines no `languages` / `default_lang`.
    """
    # determine list of i18n targets
    config = parse_jekyll_configs(config_paths)
    try:
        languages = config['languages']
        default_lang = config['default_lang']
    except KeyError:
        log.error("Could not find `languages` / `default_lang` key in jekyll config.")
        raise
    # NOTE: compare language strings by value (`!=`), not identity
    # (`is not`) -- identity only matched by accident when the strings
    # happened to be interned.
    target_languages = [lang for lang in languages if lang != default_lang]
    log.info("Default language: {}, targeting {}".format(default_lang, target_languages))
    # iterate over i18n target languages
    for lang in target_languages:
        lang_base = os.path.join(build_dir, lang)
        img_src = os.path.join(lang_base, lang)
        default_image_src = os.path.join(lang_base, default_lang)
        # If the source directory for this target is missing here, log a warning.
        if not os.path.exists(img_src):
            log.warning("No image source dir found for {}, skipping...".format(lang))
        else:
            # Iterate over the images in the source directory, and copy them over
            # any images in the path that they should exist in.
            copy_image_tree(img_src, lang_base, overwrite=True)
        # Now do the same, but with the default language, and only copy missing
        # images.
        if not os.path.exists(default_image_src):
            log.warning("No default image source dir found for {}, skipping...".format(lang))
        else:
            copy_image_tree(default_image_src, lang_base, overwrite=False)
        # remove the duplicated source image directories
        for img_dir_lang in languages:
            images_dir = os.path.join(lang_base, img_dir_lang)
            if os.path.exists(images_dir):
                # Remove duplicate image subdirectories;
                # ignore errors since leaving files here is not harmful.
                shutil.rmtree(images_dir, ignore_errors=True)
def main():
    """
    Execute CI operations.

    Builds the documentation via scripts/build_docs.sh. Inside a pull
    request (GITHUB_EVENT_NAME == "pull_request") with S3 credentials
    available, the build is uploaded to S3 and a preview link is posted
    on the PR; otherwise the docs are only built (on master, to prepare
    a gh-pages deploy, requiring DOC_URL and DOC_PATH).

    :raises RuntimeError: on master when DOC_URL / DOC_PATH are missing.
    """
    # expected file and build locations
    this_folder = os.path.abspath(os.path.dirname(__file__))
    # note - attempt to detect if we are running this for our own
    # ./docs folder or we are a submodule
    root_path = os.path.abspath(os.path.join(this_folder, ".."))
    if os.path.exists(os.path.join(root_path, ".gitmodules")):
        # a .gitmodules folder exists in the parent location
        # this means that we are running as a git submodule
        # inside another repo
        log.info("Running as a git submodule...")
    else:
        # Looks like we are not a submodule.
        root_path = os.path.abspath(this_folder)
    doc_script = os.path.join(this_folder, "scripts", "build_docs.sh")
    output_path = os.path.join(root_path, "_build")
    source_path = os.path.join(root_path, "docs")
    # jekyll config candidates, in order of precedence from low to high
    config_paths = [
        os.path.join(this_folder, "jekyll", "_config.yml"),
        os.path.join(root_path, "jekyll", "_config.yml"),
        os.path.join(root_path, "jekyll_config.yml"),
    ]
    # first figure out if we are in a PR.
    if os.environ.get("GITHUB_EVENT_NAME") == "pull_request":
        # we are in a PR.
        log.info("Inside a pull request.")
        # see if we have access to an AWS bucket
        if "S3_BUCKET" in os.environ and "S3_WEB_URL" in os.environ:
            log.info("Detected AWS S3 bucket for preview workflow.")
            s3_bucket = os.environ["S3_BUCKET"]
            target_url = os.environ["S3_WEB_URL"]
            # key the preview path by commit sha so each push gets its
            # own preview location
            target_url_path = "/tk-doc-generator/{commit}".format(
                commit=os.environ["GITHUB_SHA"]
            )
        else:
            log.warning("No S3_BUCKET and S3_WEB_URL detected in environment. "
                        "No S3 preview will be generated")
            s3_bucket = None
            # enter dummy paths so we can at least build
            # the docs to check for errors
            target_url = "https://dummy.url.com"
            target_url_path = "/"
        target_full_url = "{url}{path}/index.html".format(url=target_url, path=target_url_path)
        # build the doc
        doc_command = "{script} --url={url} --url-path={path} --source={source} --output={output}".format(  # noqa
            script=doc_script,
            url=target_url,
            path=target_url_path,
            source=source_path,
            output=output_path
        )
        execute_external_command(doc_command)
        # cleanup image i18n
        cleanup_image_i18n(config_paths, output_path)
        if s3_bucket:
            log.info("Uploading build result to S3...")
            s3_client = boto3.client(
                "s3",
                aws_access_key_id=os.environ["AWS_S3_ACCESS_KEY"],
                aws_secret_access_key=os.environ["AWS_S3_ACCESS_TOKEN"]
            )
            # note: skip the first slash when uploading to S3
            # in order to generate a correct path.
            upload_folder_to_s3(
                s3_bucket,
                s3_client,
                output_path,
                target_url_path[1:]
            )
            # Add a comment to the PR to link to the generated docs
            generate_pull_request_comment(target_full_url)
    else:
        # inside master
        log.info("Inside master. Will build docs "
                 "to prepare for a deploy to gh-pages")
        if not("DOC_URL" in os.environ and "DOC_PATH" in os.environ):
            raise RuntimeError("Need to define DOC_URL and DOC_PATH")
        # build the doc
        doc_command = "{script} --url={url} --url-path={path} --source={source} --output={output}".format(  # noqa
            script=doc_script,
            url=os.environ["DOC_URL"],
            path=os.environ["DOC_PATH"],
            source=source_path,
            output=output_path
        )
        execute_external_command(doc_command)
        # cleanup image i18n
        cleanup_image_i18n(config_paths, output_path)
if __name__ == "__main__":
    # wire the script logger to the console with a simple format
    log.setLevel(logging.INFO)
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter("%(levelname)s %(message)s"))
    log.addHandler(console_handler)

    log.info("CI documentation job starting up.")
    exit_code = 1
    try:
        main()
    except Exception:
        # log the full traceback; exit_code stays non-zero
        log.exception("An exception was raised!")
    else:
        exit_code = 0
    log.info("Exiting with code {}.".format(exit_code))
    sys.exit(exit_code)