diff --git a/Dockerfile b/Dockerfile index b600e02..d7351a5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,9 +5,9 @@ WORKDIR websrc/ RUN yarn RUN yarn build -FROM openvino/ubuntu18_dev:2021.4 +FROM openvino/ubuntu20_dev:2021.4 -COPY --from=openvino/ubuntu18_dev:2021.3 /opt/intel/openvino /opt/intel/openvino2021_3 +COPY --from=openvino/ubuntu20_dev:2021.3 /opt/intel/openvino /opt/intel/openvino2021_3 COPY --from=openvino/ubuntu18_dev:2021.2 /opt/intel/openvino /opt/intel/openvino2021_2 COPY --from=openvino/ubuntu18_dev:2021.1 /opt/intel/openvino /opt/intel/openvino2021_1 COPY --from=openvino/ubuntu18_dev:2020.4 /opt/intel/openvino /opt/intel/openvino2020_4 @@ -17,13 +17,18 @@ COPY --from=openvino/ubuntu18_dev:2020.1 /opt/intel/openvino /opt/intel/openvino COPY --from=openvino/ubuntu18_dev:2019_R3.1 /opt/intel/openvino /opt/intel/openvino2019_3 USER root -RUN apt-get update && apt-get -y upgrade -RUN apt-get install -y python-dev python3-dev nano +RUN apt-get update && apt-get -y upgrade && apt-get install -y software-properties-common +RUN add-apt-repository ppa:deadsnakes/ppa +RUN apt-get update +RUN apt-get install -y python3-dev nano git git-lfs python3.7 python3.7-venv WORKDIR /app RUN chown openvino:openvino /app USER openvino ENV PYTHONUNBUFFERED 1 +RUN git lfs install +RUN git clone https://github.com/luxonis/depthai-model-zoo.git + ADD setup_container.py . RUN python3 setup_container.py ADD requirements.txt . 
diff --git a/cli/README.md b/cli/README.md index e332a1b..54ffdf9 100644 --- a/cli/README.md +++ b/cli/README.md @@ -11,14 +11,16 @@ python3 -m pip install blobconverter ## Usage ``` -usage: blobconverter [-h] [-zn ZOO_NAME] [-onnx ONNX_MODEL] [-cp CAFFE_PROTO] [-cm CAFFE_MODEL] [-tf TENSORFLOW_PB] [-ox OPENVINO_XML] [-ob OPENVINO_BIN] [-rawn RAW_NAME] - [-rawc RAW_CONFIG] [-sh {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16}] [-dt DATA_TYPE] [-o OUTPUT_DIR] [-v VERSION] [--optimizer-params OPTIMIZER_PARAMS] - [--compile-params COMPILE_PARAMS] [--converter-url URL] [--no-cache] [--zoo-list] [--download-ir] +usage: blobconverter [-h] [-zn ZOO_NAME] [-zt ZOO_TYPE] [-onnx ONNX_MODEL] [-cp CAFFE_PROTO] [-cm CAFFE_MODEL] [-tf TENSORFLOW_PB] [-ox OPENVINO_XML] [-ob OPENVINO_BIN] + [-rawn RAW_NAME] [-rawc RAW_CONFIG] [-sh {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16}] [-dt DATA_TYPE] [-o OUTPUT_DIR] [-v VERSION] + [--optimizer-params OPTIMIZER_PARAMS] [--compile-params COMPILE_PARAMS] [--converter-url URL] [--no-cache] [--zoo-list] [--download-ir] optional arguments: -h, --help show this help message and exit -zn ZOO_NAME, --zoo-name ZOO_NAME Name of a model to download from OpenVINO Model Zoo + -zt ZOO_TYPE, --zoo-type ZOO_TYPE + Type of the model zoo to use, available: "intel", "depthai" -onnx ONNX_MODEL, --onnx-model ONNX_MODEL Path to ONNX .onnx file -cp CAFFE_PROTO, --caffe-proto CAFFE_PROTO @@ -193,3 +195,25 @@ blob_path = blobconverter.from_config( shaves=5, ) ``` + +### Use [DepthAI Model Zoo](https://github.com/luxonis/depthai-model-zoo) to download files + +```python +import blobconverter + +blob_path = blobconverter.from_zoo(name="megadepth", zoo_type="depthai") +``` + +### Download using URLs instead of local files +```python +import blobconverter + +blob_path = blobconverter.from_openvino( + xml="https://storage.openvinotoolkit.org/repositories/open_model_zoo/2021.4/models_bin/3/age-gender-recognition-retail-0013/FP16/age-gender-recognition-retail-0013.xml", + 
xml_size=31526, + xml_sha256="54d62ce4a3c3d7f1559a22ee9524bac41101103a8dceaabec537181995eda655", + bin="https://storage.openvinotoolkit.org/repositories/open_model_zoo/2021.4/models_bin/3/age-gender-recognition-retail-0013/FP16/age-gender-recognition-retail-0013.bin", + bin_size=4276038, + bin_sha256="3586df5340e9fcd73ba0e2d802631bd9e027179490635c03b273d33d582e2b58" +) +``` diff --git a/cli/blobconverter/__init__.py b/cli/blobconverter/__init__.py index 79fcd7f..e369eed 100644 --- a/cli/blobconverter/__init__.py +++ b/cli/blobconverter/__init__.py @@ -6,6 +6,7 @@ import urllib from io import StringIO from pathlib import Path +from os import path import boto3 import botocore @@ -25,6 +26,16 @@ class Versions: v2019_R3 = "2019.R3" +def get_filename(url): + fragment_removed = url.split("#")[0] # keep to left of first # + query_string_removed = fragment_removed.split("?")[0] + scheme_removed = query_string_removed.split("://")[-1].split(":")[-1] + + if scheme_removed.find("/") == -1: + return "" + return path.basename(scheme_removed) + + class ConfigBuilder: def __init__(self, precision="FP16"): self.precision = precision @@ -55,21 +66,21 @@ def with_file(self, name, path=None, url=None, google_drive=None, size=None, sha "$type": "http", "url": "$REQUEST/{}".format(file_path.name) } - elif size is None or sha256 is None: - raise RuntimeError( - "Both \"size\" and \"sha256\" params must be provided! 
(can only be omitted if using \"path\" param)" - ) elif url is not None: file_entry["source"] = url - file_entry["size"] = size - file_entry["sha256"] = sha256 + if size is not None: + file_entry["size"] = size + if sha256 is not None: + file_entry["sha256"] = sha256 elif google_drive is not None: file_entry["source"] = { "$type": "google_drive", "id": google_drive } - file_entry["size"] = size - file_entry["sha256"] = sha256 + if size is not None: + file_entry["size"] = size + if sha256 is not None: + file_entry["sha256"] = sha256 else: raise RuntimeError("No file source specified!") @@ -96,6 +107,7 @@ def build(self): "--scale_values=[255,255,255]", ], "silent": False, + "zoo_type": "intel", } try: s3 = boto3.resource('s3', config=botocore.client.Config(signature_version=botocore.UNSIGNED)) @@ -109,7 +121,7 @@ def build(self): def set_defaults(url=None, version=None, shaves=None, output_dir=None, compile_params: list = None, - optimizer_params: list = None, data_type=None, silent=None): + optimizer_params: list = None, data_type=None, silent=None, zoo_type=None): if url is not None: __defaults["url"] = url if version is not None: @@ -126,6 +138,8 @@ def set_defaults(url=None, version=None, shaves=None, output_dir=None, compile_p __defaults["data_type"] = data_type if silent is not None: __defaults["silent"] = silent + if zoo_type is not None: + __defaults["zoo_type"] = zoo_type def show_progress(curr, max): @@ -176,7 +190,7 @@ def __download_from_response(resp, fpath: Path): def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files=None, output_dir=None, url=None, - use_cache=True, compile_params=None, data_type=None, download_ir=False): + use_cache=True, compile_params=None, data_type=None, download_ir=False, zoo_type=None): if shaves is None: shaves = __defaults["shaves"] if url is None: @@ -191,6 +205,8 @@ def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files= data_type = __defaults["data_type"] if req_files is 
None: req_files = {} + if zoo_type is None: + zoo_type = __defaults["zoo_type"] blob_path = Path(output_dir) / Path("{}_openvino_{}_{}shave.blob".format(blob_name, version, shaves)) cache_config_path = Path(__defaults["output_dir"]) / '.config.json' @@ -210,6 +226,7 @@ def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files= "myriad_params_advanced": ' '.join(compile_params), "data_type": data_type, "download_ir": download_ir, + 'zoo_type': zoo_type, **req_data, } @@ -259,7 +276,7 @@ def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files= try: print(json.dumps(response.json(), indent=4)) except: - pass + print(response.text) response.raise_for_status() blob_path.parent.mkdir(parents=True, exist_ok=True) @@ -278,114 +295,117 @@ def from_zoo(name, **kwargs): return compile_blob(name, req_data=body, **kwargs) -def from_caffe(proto, model, data_type=None, optimizer_params=None, **kwargs): +def from_caffe(proto, model, data_type=None, optimizer_params=None, proto_size=None, proto_sha256=None, model_size=None, model_sha256=None, **kwargs): if optimizer_params is None: optimizer_params = __defaults["optimizer_params"] if data_type is None: data_type = __defaults["data_type"] - proto_path = Path(proto) - model_path = Path(model) - model_req_name = proto_path.with_suffix('.caffemodel').name - config_path = ConfigBuilder()\ + proto_name = get_filename(proto) + model_name = get_filename(model) + files = {} + builder = ConfigBuilder()\ .task_type("detection")\ - .framework("caffe")\ - .with_file(proto_path.name, proto_path)\ - .with_file(model_req_name, model_path)\ + .framework("caffe") \ .model_optimizer_args(optimizer_params + [ "--data_type={}".format(data_type), - "--input_model=$dl_dir/{}/{}".format(data_type, model_req_name), - "--input_proto=$dl_dir/{}/{}".format(data_type, proto_path.name), - ])\ - .build() - files = { - 'config': config_path, - model_req_name: model_path, - proto_path.name: proto_path - } - body = { - 
"name": proto_path.stem, - } + "--input_model=$dl_dir/{}/{}".format(data_type, model_name), + "--input_proto=$dl_dir/{}/{}".format(data_type, proto_name), + ]) - return compile_blob(blob_name=proto_path.stem, req_data=body, req_files=files, data_type=data_type, **kwargs) + if str(proto).startswith("http"): + builder = builder.with_file(name=get_filename(proto), url=proto, size=proto_size, sha256=proto_sha256) + else: + proto_path = Path(proto) + builder = builder.with_file(name=proto_path.name, path=proto_path) + files[proto_path.name] = proto_path + if str(model).startswith("http"): + files["config"] = builder.with_file(name=get_filename(model), url=model, size=model_size, sha256=model_sha256) + else: + model_path = Path(model) + files["config"] = builder.with_file(name=model_path.name, path=model_path).build() + files[model_path.name] = model_path + + return compile_blob(blob_name=Path(proto_name).stem, req_data={"name": Path(proto_name).stem}, req_files=files, data_type=data_type, **kwargs) -def from_onnx(model, data_type=None, optimizer_params=None, **kwargs): + +def from_onnx(model, data_type=None, optimizer_params=None, model_size=None, model_sha256=None, **kwargs): if optimizer_params is None: optimizer_params = __defaults["optimizer_params"] if data_type is None: data_type = __defaults["data_type"] - model_path = Path(model) + files = {} + model_name = get_filename(model) - config_path = ConfigBuilder()\ + builder = ConfigBuilder()\ .task_type("detection")\ .framework("onnx")\ - .with_file(model_path.name, model_path)\ .model_optimizer_args(optimizer_params + [ "--data_type={}".format(data_type), - "--input_model=$dl_dir/{}/{}".format(data_type, model_path.name), - ])\ - .build() - files = { - 'config': config_path, - model_path.name: model_path - } - body = { - "name": model_path.stem, - } + "--input_model=$dl_dir/{}/{}".format(data_type, model_name), + ]) - return compile_blob(blob_name=model_path.stem, req_data=body, req_files=files, data_type=data_type, 
**kwargs) + if str(model).startswith("http"): + files["config"] = builder\ + .with_file(name=get_filename(model), url=model, size=model_size, sha256=model_sha256)\ + .build() + else: + files["config"] = builder\ + .with_file(name=model_name, path=Path(model))\ + .build() + files[model_name] = Path(model) + + return compile_blob(blob_name=Path(model_name).stem, req_data={"name": Path(model_name).stem}, req_files=files, data_type=data_type, **kwargs) -def from_tf(frozen_pb, data_type=None, optimizer_params=None, **kwargs): +def from_tf(frozen_pb, data_type=None, optimizer_params=None, frozen_pb_size=None, frozen_pb_sha256=None, **kwargs): if optimizer_params is None: optimizer_params = __defaults["optimizer_params"] if data_type is None: data_type = __defaults["data_type"] - frozen_pb_path = Path(frozen_pb) + files = {} + frozen_pb_name = get_filename(frozen_pb) - config_path = ConfigBuilder()\ + builder = ConfigBuilder()\ .task_type("detection")\ .framework("tf")\ - .with_file(frozen_pb_path.name, frozen_pb_path)\ .model_optimizer_args(optimizer_params + [ "--data_type={}".format(data_type), - "--input_model=$dl_dir/{}/{}".format(data_type, frozen_pb_path.name), - ])\ - .build() - files = { - 'config': config_path, - frozen_pb_path.name: frozen_pb_path - } - body = { - "name": frozen_pb_path.stem, - } + "--input_model=$dl_dir/{}/{}".format(data_type, frozen_pb_name), + ]) - return compile_blob(blob_name=frozen_pb_path.stem, req_data=body, req_files=files, data_type=data_type, **kwargs) + if str(frozen_pb).startswith("http"): + files["config"] = builder.with_file(name=get_filename(frozen_pb), url=frozen_pb, size=frozen_pb_size, sha256=frozen_pb_sha256).build() + else: + files["config"] = builder.with_file(name=frozen_pb_name, path=Path(frozen_pb)).build() + files[frozen_pb_name] = Path(frozen_pb) + return compile_blob(blob_name=Path(frozen_pb_name).stem, req_data={"name": Path(frozen_pb_name).stem}, req_files=files, data_type=data_type, **kwargs) -def 
from_openvino(xml, bin, **kwargs): - xml_path = Path(xml) - bin_path = Path(bin) - bin_req_name = xml_path.with_suffix('.bin').name - config_path = ConfigBuilder()\ +def from_openvino(xml, bin, xml_size=None, xml_sha256=None, bin_size=None, bin_sha256=None, **kwargs): + files = {} + builder = ConfigBuilder()\ .task_type("detection")\ - .framework("dldt")\ - .with_file(xml_path.name, xml_path)\ - .with_file(bin_req_name, bin_path)\ - .build() - files = { - 'config': config_path, - xml_path.name: xml_path, - bin_req_name: bin_path - } + .framework("dldt") + xml_name = get_filename(xml) + bin_name = get_filename(bin) - body = { - "name": xml_path.stem, - } + if str(xml).startswith("http"): + builder = builder.with_file(name=xml_name, url=xml, size=xml_size, sha256=xml_sha256) + else: + builder = builder.with_file(name=xml_name, path=Path(xml)) + files[xml_name] = Path(xml) + + if str(bin).startswith("http"): + builder = builder.with_file(name=bin_name, url=bin, size=bin_size, sha256=bin_sha256) + else: + builder = builder.with_file(name=bin_name, path=Path(bin)) + files[bin_name] = Path(bin) - return compile_blob(blob_name=xml_path.stem, req_data=body, req_files=files, **kwargs) + files["config"] = builder.build() + return compile_blob(blob_name=Path(xml_name).stem, req_data={"name": Path(xml_name).stem}, req_files=files, **kwargs) def from_config(name, path, **kwargs): @@ -419,6 +439,7 @@ def __run_cli__(): import argparse parser = argparse.ArgumentParser() parser.add_argument('-zn', '--zoo-name', help="Name of a model to download from OpenVINO Model Zoo") + parser.add_argument('-zt', '--zoo-type', help="Type of the model zoo to use, available: \"intel\", \"depthai\" ") parser.add_argument('-onnx', '--onnx-model', help="Path to ONNX .onnx file") parser.add_argument('-cp', '--caffe-proto', help="Path to Caffe .prototxt file") parser.add_argument('-cm', '--caffe-model', help="Path to Caffe .caffemodel file") @@ -444,7 +465,7 @@ def __run_cli__(): common_args = { arg: 
getattr(args, arg) - for arg in ["shaves", "data_type", "output_dir", "version", "url", "compile_params", "download_ir"] + for arg in ["shaves", "data_type", "output_dir", "version", "url", "compile_params", "download_ir", "zoo_type"] } if args.zoo_list: return zoo_list() diff --git a/cli/blobconverter/test.py b/cli/blobconverter/test.py index 9df2388..c81a7d9 100644 --- a/cli/blobconverter/test.py +++ b/cli/blobconverter/test.py @@ -9,6 +9,18 @@ if not use_cache and Path(blobconverter.__defaults["output_dir"]).exists(): shutil.rmtree(blobconverter.__defaults["output_dir"]) +result = blobconverter.from_zoo(name="megadepth", zoo_type="depthai") +print(result) + +result = blobconverter.from_openvino( + xml="https://storage.openvinotoolkit.org/repositories/open_model_zoo/2021.4/models_bin/3/age-gender-recognition-retail-0013/FP16/age-gender-recognition-retail-0013.xml", + xml_size=31526, + xml_sha256="54d62ce4a3c3d7f1559a22ee9524bac41101103a8dceaabec537181995eda655", + bin="https://storage.openvinotoolkit.org/repositories/open_model_zoo/2021.4/models_bin/3/age-gender-recognition-retail-0013/FP16/age-gender-recognition-retail-0013.bin", + bin_size=4276038, + bin_sha256="3586df5340e9fcd73ba0e2d802631bd9e027179490635c03b273d33d582e2b58" +) +print(result) result = blobconverter.from_onnx( model="../../concat.onnx", diff --git a/cli/setup.py b/cli/setup.py index 2b84d98..144da01 100644 --- a/cli/setup.py +++ b/cli/setup.py @@ -4,7 +4,7 @@ setup( name='blobconverter', - version='1.1.1', + version='1.2.0', description='The tool that allows you to covert neural networks to MyriadX blob', long_description=io.open("README.md", encoding="utf-8").read(), long_description_content_type="text/markdown", diff --git a/main.py b/main.py index 69d3ad3..7b8f953 100644 --- a/main.py +++ b/main.py @@ -82,14 +82,21 @@ def __init__(self): self.downloader_path = Path(__file__).parent / Path("model_compiler/openvino_2019.3/downloader.py") self.venv_path = Path(__file__).parent / 
Path("venvs/venv2019_3") else: - raise ValueError(f'Unknown self.version: "{self.version}", available: "2021.4", "2021.3", "2021.2", "2021.1", "2020.4", "2020.3", "2020.2", "2020.1", "2019.R3"') + raise ValueError(f'Unknown version: "{self.version}", available: "2021.4", "2021.3", "2021.2", "2021.1", "2020.4", "2020.3", "2020.2", "2020.1", "2019.R3"') self.workdir = UPLOAD_FOLDER / Path(uuid.uuid4().hex) self.workdir.mkdir(parents=True, exist_ok=True) self.cache_path.mkdir(parents=True, exist_ok=True) self.compiler_path = self.base_path / Path("deployment_tools/inference_engine/lib/intel64/myriad_compile") - self.model_zoo_path = self.base_path / Path("deployment_tools/open_model_zoo/models") + + self.model_zoo_type = request.form.get('zoo_type', "intel") + if self.model_zoo_type == "intel": + self.model_zoo_path = self.base_path / Path("deployment_tools/open_model_zoo/models") + elif self.model_zoo_type == "depthai": + self.model_zoo_path = Path(__file__).parent / Path("depthai-model-zoo/models") + else: + raise ValueError(f'Unknown zoo type: "{self.model_zoo_type}", available: "intel", "depthai"') self.env = os.environ.copy() self.env['InferenceEngine_DIR'] = str(self.base_path / Path("deployment_tools/inference_engine/share")) @@ -187,17 +194,17 @@ def parse_config(config_path, name, data_type, env): if "source" not in file: raise BadRequest("Each file needs to have \"source\" param") if "$type" in file["source"]: - if file["source"]["$type"] == "http": + if file["source"]["$type"] == "http" and "$REQUEST" in file["source"]["url"]: local_path = file["source"]["url"].replace("$REQUEST", str((env.workdir / name / data_type).absolute())) file["source"]["url"] = "file://" + local_path - if "size" not in file: - if file["source"]["$type"] != "http" or not file["source"]["url"].startswith("file://"): - raise BadRequest("You need to supply \"size\" parameter for file when using a remote source") - file["size"] = Path(local_path).stat().st_size - if "sha256" not in
file: - if file["source"]["$type"] != "http" or not file["source"]["url"].startswith("file://"): - raise BadRequest("You need to supply \"sha256\" parameter for file when using a remote source") - file["sha256"] = sha256sum(local_path) + # if "size" not in file: + # if not file["source"]["url"].startswith("file://"): + # raise BadRequest("You need to supply \"size\" parameter for file when using a remote source") + # file["size"] = Path(local_path).stat().st_size + # if "sha256" not in file: + # if not file["source"]["url"].startswith("file://"): + # raise BadRequest("You need to supply \"sha256\" parameter for file when using a remote source") + # file["sha256"] = sha256sum(local_path) with open(config_path, "w", encoding='utf8') as f: yaml.dump(config, f , default_flow_style=False, allow_unicode=True) @@ -229,7 +236,7 @@ def prepare_compile_config(shaves, env): def fetch_from_zoo(env, name): - return next(env.model_zoo_path.rglob(f'*/{name}/model.yml'), None) + return next(env.model_zoo_path.rglob(f'**/{name}/model.yml'), None) @app.route("/compile", methods=['POST']) @@ -271,12 +278,13 @@ def compile(): compile_config_path = prepare_compile_config(myriad_shaves, env) commands = [] xml_path = env.workdir / name / data_type / (name + ".xml") - if use_zoo: + if len(file_paths) == 0: commands.append( f"{env.executable} {env.downloader_path} --output_dir {env.workdir} --cache_dir {env.cache_path} --num_attempts 5 --name {name} --model_root {env.workdir}" ) - preconvert_script = env.model_zoo_path / "public" / name / "pre-convert.py" - if preconvert_script.exists(): + if use_zoo: + preconvert_script = next(env.model_zoo_path.rglob(f"**/{name}/pre-convert.py"), None) + if preconvert_script is not None: commands.append( f"{env.executable} {preconvert_script} {env.workdir / name} {env.workdir / name}" ) diff --git a/setup_container.py b/setup_container.py index 1ecfa3d..99f10e0 100644 --- a/setup_container.py +++ b/setup_container.py @@ -9,6 +9,9 @@ "2021_2": 
Path("/opt/intel/openvino2021_2"), "2021_1": Path("/opt/intel/openvino2021_1"), "2020_4": Path("/opt/intel/openvino2020_4"), +} + +legacy = { "2020_3": Path("/opt/intel/openvino2020_3"), "2020_2": Path("/opt/intel/openvino2020_2"), "2020_1": Path("/opt/intel/openvino2020_1"), @@ -22,7 +25,7 @@ def abs_str(path: Path): return str(path.absolute()) -def create_venv(name: str, path: Path): +def create_venv(name: str, path: Path, interpreter): req_path = path / "deployment_tools" / "model_optimizer" / "requirements.txt" venv_path = Path("/app") / "venvs" / ("venv"+name) venv_python_path = venv_path / "bin" / "python" @@ -32,7 +35,7 @@ def create_venv(name: str, path: Path): del new_env["PYTHONHOME"] new_env["VIRTUAL_ENV"] = abs_str(venv_path) new_env["PATH"] = abs_str(venv_path / "bin") + ":" + new_env["PATH"] - subprocess.check_call([sys.executable, "-m", "venv", abs_str(venv_path)]) + subprocess.check_call([interpreter, "-m", "venv", abs_str(venv_path)]) subprocess.check_call([abs_str(venv_python_path), "-m", "pip", "install", "-U", "pip"], env=new_env) subprocess.check_call([abs_str(venv_python_path), "-m", "pip", "install", "-r", abs_str(req_path)], env=new_env) subprocess.check_call([abs_str(venv_python_path), "-m", "pip", "install", *additional_packages], env=new_env) @@ -40,5 +43,7 @@ def create_venv(name: str, path: Path): if __name__ == "__main__": for env_name, base_path in versions.items(): - create_venv(env_name, base_path) + create_venv(env_name, base_path, "python3.8") + for env_name, base_path in legacy.items(): + create_venv(env_name, base_path, "python3.7")