@@ -1,6 +1,7 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
 """All the action we need during build"""
+
 import hashlib
 import io
 import json
@@ -75,21 +76,24 @@ def install_bundled_libs(session):
     download_url(debugpy_info["any"])


-def download_url(value):
-    with url_lib.urlopen(value["url"]) as response:
-        data = response.read()
-        hash_algorithm, hash_value = [
-            (key, value) for key, value in value["hash"].items()
-        ][0]
-        if hashlib.new(hash_algorithm, data).hexdigest() != hash_value:
-            raise ValueError("Failed hash verification for {}.".format(value["url"]))
+def download_url(values):
+    for value in values:
+        with url_lib.urlopen(value["url"]) as response:
+            data = response.read()
+            hash_algorithm, hash_value = [
+                (key, value) for key, value in value["hash"].items()
+            ][0]
+            if hashlib.new(hash_algorithm, data).hexdigest() != hash_value:
+                raise ValueError(
+                    "Failed hash verification for {}.".format(value["url"])
+                )

-    print("Download: ", value["url"])
-    with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
-        libs_dir = pathlib.Path.cwd() / "bundled" / "libs"
-        for zip_info in wheel.infolist():
-            print("\t" + zip_info.filename)
-            wheel.extract(zip_info.filename, libs_dir)
+        print("Download: ", value["url"])
+        with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
+            libs_dir = pathlib.Path.cwd() / "bundled" / "libs"
+            for zip_info in wheel.infolist():
+                print("\t" + zip_info.filename)
+                wheel.extract(zip_info.filename, libs_dir)


 @nox.session()
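
For reference, a minimal usage sketch of the reworked helper: download_url() now takes a list of download entries, matching the per-platform lists that create_debugpy_json writes below. The entry shape (a "url" plus a "hash" mapping of algorithm name to digest) is taken from the diff; the URL and digest here are placeholders, not real values.

# Hypothetical entries in the {"url": ..., "hash": {algorithm: digest}} shape
# that download_url() reads; the URL and digest are placeholders.
debugpy_downloads = [
    {
        "url": "https://files.pythonhosted.org/packages/.../debugpy-X.Y.Z-py2.py3-none-any.whl",
        "hash": {"sha256": "<expected sha256 digest>"},
    },
]

# Each entry is hash-verified, then extracted into bundled/libs.
download_url(debugpy_downloads)
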
@@ -141,18 +145,24 @@ def _get_debugpy_info(version="latest", platform="none-any", cp="cp311"):


 @nox.session
-@nox.parametrize("version", ["1.5.1", "1.7.0", "latest"])
-@nox.parametrize("cp", ["cp39", "cp311"])
-def create_debugpy_json(session: nox.Session, version, cp):
+def create_debugpy_json(session: nox.Session):
     platforms = [
         ("macOS", "macosx"),
-        ("win32", "win32"),
+        # ("win32", "win32"), # VS Code does not support 32-bit Windows anymore
         ("win64", "win_amd64"),
         ("linux", "manylinux"),
         ("any", "none-any"),
     ]
     debugpy_info_json_path = pathlib.Path(__file__).parent / "debugpy_info.json"
-    debugpy_info = {p: _get_debugpy_info(version, id, cp) for p, id in platforms}
+    debugpy_info = {}
+    for p, id in platforms:
+        # we typically have the latest 3 versions of debugpy compiled bits
+        downloads = []
+        for cp in ["cp310", "cp311", "cp312"]:
+            data = _get_debugpy_info("latest", id, cp)
+            if not any(d["url"] == data["url"] for d in downloads):
+                downloads.append(data)
+        debugpy_info[p] = downloads
     debugpy_info_json_path.write_text(
         json.dumps(debugpy_info, indent=4), encoding="utf-8"
     )
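
With this change, each platform key in debugpy_info.json maps to a list of wheel downloads (one per compiled cp tag, with duplicate URLs skipped) instead of a single entry. An illustrative example of the resulting shape, with placeholder filenames, versions, and digests:

# Illustrative structure only; real URLs, versions, and digests come from PyPI.
debugpy_info = {
    "win64": [
        {"url": ".../debugpy-X.Y.Z-cp310-cp310-win_amd64.whl", "hash": {"sha256": "<digest>"}},
        {"url": ".../debugpy-X.Y.Z-cp311-cp311-win_amd64.whl", "hash": {"sha256": "<digest>"}},
        {"url": ".../debugpy-X.Y.Z-cp312-cp312-win_amd64.whl", "hash": {"sha256": "<digest>"}},
    ],
    # A pure-Python wheel has the same URL for every cp tag, so the duplicate
    # check in create_debugpy_json keeps only one copy for the "any" platform.
    "any": [
        {"url": ".../debugpy-X.Y.Z-py2.py3-none-any.whl", "hash": {"sha256": "<digest>"}},
    ],
}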
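
The _get_debugpy_info helper itself is outside this hunk. Purely as an illustration of why the duplicate-URL check is needed, here is a sketch (not the repository's actual helper) of resolving a wheel for a given platform/cp tag via PyPI's JSON API; a none-any platform resolves to the same pure-Python wheel for every cp tag, which is exactly the duplication the loop above filters out.

import json
import urllib.request as url_lib


def _lookup_debugpy_wheel(platform="none-any", cp="cp311"):
    # Sketch only: query PyPI's JSON API for the latest debugpy release and
    # return the first wheel whose filename matches the platform and cp tags.
    with url_lib.urlopen("https://pypi.org/pypi/debugpy/json") as response:
        release = json.loads(response.read())
    for file_info in release["urls"]:
        name = file_info["filename"]
        if platform in name and (cp in name or platform == "none-any"):
            return {
                "url": file_info["url"],
                "hash": {"sha256": file_info["digests"]["sha256"]},
            }
    return None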