Commit 638423f

[tools] Fix hanging of docs.modm.io generator

salkinium committed Dec 15, 2024
1 parent 0588172 · commit 638423f

Showing 3 changed files with 27 additions and 18 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/deploy-docs.yml
@@ -49,9 +49,9 @@ jobs:
   # Niklas: Running on the old container until I can figure out why the doc generator hangs
   api-docs-all:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-20.04
     container:
-      image: ghcr.io/modm-ext/modm-build-base:2024-12-01
+      image: ghcr.io/modm-ext/modm-build-base:2022-09-27
     steps:
       - name: Check out repository
         uses: actions/checkout@v4
@@ -62,7 +62,7 @@ jobs:
           git config --global --add safe.directory /__w/modm/modm
       - name: Update lbuild
         run: |
-          pip3 install --upgrade --upgrade-strategy=eager --break-system-packages modm
+          pip3 install --upgrade --upgrade-strategy=eager modm
       - name: Full run of docs.modm.io-generator-script
         run: |
           export TERM=xterm-256color
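
The pip flag change tracks the base image: Ubuntu 24.04 ships a PEP 668 "externally managed" Python, whose pip refuses system-wide installs without --break-system-packages, while the pip on the 2022 image predates that flag entirely and would reject it. A minimal sketch (not part of the commit) for checking which behavior a given image will have:

import sys
import sysconfig
from pathlib import Path

# Per PEP 668, an EXTERNALLY-MANAGED marker file in the stdlib directory
# tells pip to refuse system-wide installs unless --break-system-packages is given.
marker = Path(sysconfig.get_path("stdlib")) / "EXTERNALLY-MANAGED"
print("Python", sys.version.split()[0],
      "is externally managed" if marker.exists()
      else "allows system-wide pip installs")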
9 changes: 9 additions & 0 deletions .github/workflows/linux.yml
@@ -393,9 +393,18 @@ jobs:
       - name: Fix Git permission/ownership problem
         run: |
           git config --global --add safe.directory /__w/modm/modm
+      - name: Check environment
+        run: |
+          env
+          locale -a
+          python --version || true
+          python3 --version || true
+          python3 -c "import os; print(os.cpu_count())"
       - name: Update lbuild
         run: |
           pip3 install --upgrade --upgrade-strategy=eager modm
+          which lbuild
+          lbuild --version
       - name: Install mkdocs mkdocs-material (pip)
         run: |
           pip3 install --upgrade --upgrade-strategy=eager mkdocs mkdocs-material
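
The new "Check environment" step is pure diagnostics for the hang: it records the locales, interpreter versions, lbuild location, and the CPU count the runner exposes. One detail worth noting (my addition, not stated in the commit): inside a CPU-limited container, os.cpu_count() still reports every host CPU, so a pool sized from it can be much wider than the CPUs the job may actually use:

import os

# os.cpu_count() counts all host CPUs; the scheduler affinity mask shows how
# many this process may actually run on (sched_getaffinity is Linux-only).
print("cpu_count:", os.cpu_count())
print("usable cpus:", len(os.sched_getaffinity(0)))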
30 changes: 15 additions & 15 deletions tools/scripts/docs_modm_io_generator.py
@@ -145,17 +145,17 @@ def main():
print("Starting to generate documentation...")
template_overview(output_dir, device_list, board_list, template_path)
print(f"... for {len(device_list) + len(board_list)} devices, estimated memory footprint is {len(device_list)*70+2000} MB")
# We can only pass one argument to pool.map
devices = [f'python3 {filepath} --target-job "{modm_path}|{tempdir}|{dev}||{args.deduplicate}|{args.compress}"' for dev in device_list]
devices += [f'python3 {filepath} --target-job "{modm_path}|{tempdir}|{dev}|{brd}|{args.deduplicate}|{args.compress}"' for (brd, dev) in board_list]
devices = list(set(devices))
# Run the first generation first so that the other jobs can already deduplicate properly
results = [subprocess.call(devices[0], shell=True)]
with ThreadPool(args.jobs) as pool:
# We can only pass one argument to pool.map
devices = [f'python3 {filepath} --target-job "{modm_path}|{tempdir}|{dev}||{args.deduplicate}|{args.compress}"' for dev in device_list]
devices += [f'python3 {filepath} --target-job "{modm_path}|{tempdir}|{dev}|{brd}|{args.deduplicate}|{args.compress}"' for (brd, dev) in board_list]
devices = list(set(devices))
# Run the first generation first so that the other jobs can already deduplicate properly
results = [subprocess.call(devices[0], shell=True)]
results += pool.map(lambda d: subprocess.call(d, shell=True), devices[1:])
# remove all the hash files
for file in (output_dir / "develop/api").glob("*.hash"):
file.unlink()
# remove all the hash files
for file in (output_dir / "develop/api").glob("*.hash"):
file.unlink()
if args.compress:
print("Zipping docs ...")
# Zipping is *much* faster via command line than via python!
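
The Python-side restructuring moves the job-list construction, the priming run, and the hash-file cleanup out of the `with ThreadPool(...)` block, leaving only the blocking pool.map inside it. The commit does not name the root cause of the hang, but one plausible reading is defensive scoping: multiprocessing pools call terminate() when their context manager exits, so nothing except fully blocking pool work should live inside the block. A minimal, runnable sketch of the resulting pattern (echo commands stand in for the real generator jobs):

from multiprocessing.pool import ThreadPool
import subprocess

# De-duplicated, ordered job list; the first job runs alone so later jobs
# can reuse its results (mirroring the "deduplicate properly" comment above).
commands = sorted({"echo one", "echo two", "echo three"})
results = [subprocess.call(commands[0], shell=True)]
with ThreadPool(2) as pool:
    # map() blocks until every job has finished, so the implicit terminate()
    # on __exit__ cannot cut any work short.
    results += pool.map(lambda c: subprocess.call(c, shell=True), commands[1:])
print(results)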
@@ -180,7 +180,7 @@ def create_target(argument):
     tempdir = Path(tempdir)
     output_dir = board if board else device
     try:
-        print(f"Generating documentation for {output_dir}...")
+        print(f"Generating documentation for {output_dir}...", flush=True)
 
         options = [f"modm:target={device}"]
         if device.startswith("at"):
@@ -202,19 +202,19 @@ def create_target(argument):
 
         builder.build(output_dir, modules)
 
-        print(f"Executing: (cd {output_dir}/modm/docs/ && doxypress doxypress.json)")
+        print(f"Executing: (cd {output_dir}/modm/docs/ && doxypress doxypress.json)", flush=True)
         retval = subprocess.call(f"(cd {output_dir}/modm/docs/ && doxypress doxypress.json > /dev/null 2>&1)", shell=True)
         # retval = subprocess.call(f"(cd {output_dir}/modm/docs/ && doxygen doxyfile.cfg > /dev/null 2>&1)", shell=True)
         if retval != 0:
-            print(f"Error {retval} generating documentation for device {output_dir}.")
+            print(f"Error {retval} generating documentation for device {output_dir}.", flush=True)
             return False
-        print(f"Finished generating documentation for device {output_dir}.")
+        print(f"Finished generating documentation for device {output_dir}.", flush=True)
 
         srcdir = (tempdir / output_dir / "modm/docs/html")
         destdir = tempdir / "output/develop/api" / output_dir
 
         if deduplicate == "True":
-            print(f"Deduplicating files for {device}...")
+            print(f"Deduplicating files for {device}...", flush=True)
             # Find and build the hash symlink database
             hashdb = {}
             for hashes in tempdir.glob("output/develop/api/*.hash"):
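
The diff view collapses the body of the deduplication pass here. From the context lines, each finished target contributes a *.hash file mapping content hashes to already-written files, so later jobs can replace byte-identical output with symlinks. A hedged sketch of that idea, with an assumed "<digest> <path>" file format and illustrative helper names (neither is shown in this commit):

import hashlib
from pathlib import Path

def build_hashdb(tempdir: Path) -> dict[str, Path]:
    # Collect the hash databases written by previously finished targets.
    hashdb = {}
    for hashes in tempdir.glob("output/develop/api/*.hash"):
        for line in hashes.read_text().splitlines():
            digest, _, path = line.partition(" ")
            hashdb.setdefault(digest, Path(path))
    return hashdb

def deduplicate(srcdir: Path, hashdb: dict[str, Path]) -> None:
    for file in srcdir.rglob("*"):
        if not file.is_file():
            continue
        digest = hashlib.sha1(file.read_bytes()).hexdigest()
        if (target := hashdb.get(digest)) is not None:
            file.unlink()
            file.symlink_to(target)  # replace the duplicate with a symlink
        else:
            hashdb[digest] = file    # first occurrence becomes the canonical copy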
@@ -266,7 +266,7 @@ def create_target(argument):
         srcdir.rename(destdir)
         return True
     except Exception as e:
-        print(f"Error generating documentation for device {output_dir}: {e}")
+        print(f"Error generating documentation for device {output_dir}: {e}", flush=True)
         return False


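The remaining changed lines all add flush=True to the prints in the per-target worker. This is standard CI hygiene: when stdout is a pipe rather than a terminal, CPython block-buffers it, so messages from a process that later hangs can sit in the buffer forever and the log appears to end long before the actual stall. Flushing each message makes the log trustworthy up to the moment of the hang:

import sys

# Under GitHub Actions stdout is a pipe, so isatty() is False and Python
# falls back to block buffering instead of line buffering.
print("stdout is a tty:", sys.stdout.isatty())
print("progress message", flush=True)  # reaches the log immediately, even if we hang next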
