diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 00000000..b423033f --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,3 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 88136b1b..a94cb2f8 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1 @@ -imaspy/_version.py export-subst +.git_archival.txt export-subst diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 00000000..2bb329f6 --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,31 @@ +name: imas-python + +on: push + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - name: Checkout imas-python sources + uses: actions/checkout@v4 + with: + ref: rename-imaspy-to-imas + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Display Python version + run: python -c "import sys; print(sys.version)" + + - name: Install the code linting and formatting tools + run: pip install --upgrade 'black >=24,<25' flake8 + + - name: Check formatting of code with black + run: black --check imas + + - name: Check linting with flake8 + run: flake8 imas diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..3623ebab --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,67 @@ +name: build-wheel-and-publish-test-pypi + +on: push + +jobs: + build: + name: Build distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install pypa/build + run: >- + python3 -m pip install pip setuptools wheel build + - name: Build a binary wheel and a source tarball + run: python3 -m build . 
+ - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: python-package-distributions + path: dist/ + + publish-to-pypi: + name: Publish imas-python distribution to PyPI + needs: + - build + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/imas-python + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + + publish-to-testpypi: + name: Publish imas-python distribution to TestPyPI + needs: + - build + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/p/imas-python + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution to TestPyPI + uses: pypa/gh-action-pypi-publish@unstable/v1 + with: + repository-url: https://test.pypi.org/legacy/ + verbose: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index 6a19e008..8bff5e88 100644 --- a/.gitignore +++ b/.gitignore @@ -91,21 +91,16 @@ ENV/ *.swo # SCM setuptools -imaspy/version.py - -# Saxon symlink or downloaded file -saxon*.jar +imas/_version.py # IMAS DD data-dictionary -access-layer -containers/arch/imaspy/ +containers/arch/imas/ containers/arch/data-dictionary/ -containers/arch/access-layer/ -imaspy/assets/IDSDef.zip +imas/assets/IDSDef.zip # IDS files -*.ids +# *.ids # ASV folder /.asv diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..426920c7 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,23 @@ +version: 2 + +build: + os: "ubuntu-22.04" + tools: + python: "3.11" + jobs: + post_checkout: + - git fetch --unshallow || true + +python: 
+ install: + - method: pip + path: . + extra_requirements: + - docs + - netcdf + - h5py + +sphinx: + builder: html + configuration: docs/source/conf.py + fail_on_warning: false \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in index b002b31c..9c3b7df1 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ -include imaspy/assets/IDSDef.zip -include imaspy/assets/IDSDef2MDSpreTree.xsl -include imaspy/assets/ITER_134173_106_equilibrium.ids -include imaspy/assets/ITER_134173_106_core_profiles.ids -include imaspy/assets/equilibrium.ids -include imaspy/assets/core_profiles.ids +include imas/assets/IDSDef.zip +include imas/assets/IDSDef2MDSpreTree.xsl +include imas/assets/ITER_134173_106_equilibrium.ids +include imas/assets/ITER_134173_106_core_profiles.ids +include imas/assets/equilibrium.ids +include imas/assets/core_profiles.ids diff --git a/README.md b/README.md index 14d4b81e..c2afa4ad 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ -# IMAS-Python +# imas-python -IMAS-Python is a pure-python library to handle arbitrarily nested data structures. -It is designed for, but not necessarily bound to, interacting with Interface +imas-python is a pure-python library to handle arbitrarily nested data structures. +imas-python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -11,7 +11,7 @@ Data Model. Install steps are described in the documentation generated from `/docs/source/installing.rst`. 
Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) -and can be found at the [ITER sharepoint](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/IMAS-doc/index.html) +and can be found at the [readthedocs](https://imas-python.readthedocs.io/en/latest/) The documentation can be manually generated by installing sphinx and running: @@ -40,7 +40,7 @@ A quick 5 minutes introduction is available in the documentation generated from ## Legal -IMAS-Python is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de +imas-python is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de Plassche , Copyright 2020-2022 Daan van Vugt , and Copyright 2020 Dutch Institute for Fundamental Energy Research . It is licensed under [LGPL 3.0](LICENSE.txt). diff --git a/asv.conf.json b/asv.conf.json index b10c743a..0b11cf72 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -4,10 +4,10 @@ "version": 1, // The name of the project being benchmarked - "project": "imaspy", + "project": "imas", // The project's homepage - "project_url": "https://git.iter.org/projects/IMAS/repos/imaspy/browse", + "project_url": "https://github.com/iterorganization/imas-python", // The URL or local path of the source code repository for the // project being benchmarked @@ -53,7 +53,7 @@ //"install_timeout": 600, // the base URL to show a commit for the project. - "show_commit_url": "https://git.iter.org/projects/IMAS/repos/imaspy/commits/", + "show_commit_url": "https://github.com/iterorganization/imas-python/commits/main/", // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. 
diff --git a/benchmarks/core_profiles.py b/benchmarks/core_profiles.py index 743cd12e..047b5afc 100644 --- a/benchmarks/core_profiles.py +++ b/benchmarks/core_profiles.py @@ -3,7 +3,7 @@ import numpy as np -import imaspy +import imas from .utils import ( available_backends, @@ -22,16 +22,16 @@ def fill_slices(core_profiles, times): """Fill a time slice of a core_profiles IDS with generated data. Args: - core_profiles: core_profiles IDS (either from IMASPy or AL HLI) + core_profiles: core_profiles IDS (either from imas-python or AL HLI) times: time values to fill a slice for """ core_profiles.ids_properties.homogeneous_time = 1 # HOMOGENEOUS - core_profiles.ids_properties.comment = "Generated for the IMASPy benchmark suite" + core_profiles.ids_properties.comment = "Generated for the imas-python benchmark suite" core_profiles.ids_properties.creation_date = datetime.date.today().isoformat() - core_profiles.code.name = "IMASPy ASV benchmark" - core_profiles.code.version = imaspy.__version__ + core_profiles.code.name = "imas-python ASV benchmark" + core_profiles.code.version = imas.__version__ core_profiles.code.repository = ( - "https://git.iter.org/projects/IMAS/repos/imaspy/browse" + "https://github.com/iterorganization/imas-python" ) core_profiles.time = np.array(times) @@ -50,7 +50,14 @@ def fill_slices(core_profiles, times): profiles_1d.ion.resize(len(ions)) profiles_1d.neutral.resize(len(ions)) for i, ion in enumerate(ions): - profiles_1d.ion[i].label = profiles_1d.neutral[i].label = ion + if hasattr(profiles_1d.ion[i], 'label'): + profiles_1d.ion[i].label = ion + profiles_1d.neutral[i].label = ion + if hasattr(profiles_1d.ion[i], 'name'): + profiles_1d.ion[i].name = ion + profiles_1d.neutral[i].name = ion + + # profiles_1d.ion[i].label = profiles_1d.neutral[i].label = ion profiles_1d.ion[i].z_ion = 1.0 profiles_1d.ion[i].neutral_index = profiles_1d.neutral[i].ion_index = i + 1 @@ -74,7 +81,7 @@ def setup(self, hli, backend): def time_get_slice(self, hli, 
backend): for t in TIME: - self.dbentry.get_slice("core_profiles", t, imaspy.ids_defs.CLOSEST_INTERP) + self.dbentry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) def teardown(self, hli, backend): if hasattr(self, "dbentry"): # imas + netCDF has no dbentry @@ -96,8 +103,8 @@ class LazyGet: param_names = ["lazy", "backend"] def setup(self, lazy, backend): - self.dbentry = create_dbentry("imaspy", backend) - core_profiles = factory["imaspy"].core_profiles() + self.dbentry = create_dbentry("imas", backend) + core_profiles = factory["imas"].core_profiles() fill_slices(core_profiles, TIME) self.dbentry.put(core_profiles) diff --git a/benchmarks/edge_profiles.py b/benchmarks/edge_profiles.py index 87ff2b51..c2f69c28 100644 --- a/benchmarks/edge_profiles.py +++ b/benchmarks/edge_profiles.py @@ -3,7 +3,7 @@ import numpy as np -import imaspy +import imas from .utils import available_backends, create_dbentry, factory, hlis @@ -17,18 +17,18 @@ def fill_ggd(edge_profiles, times): """Fill nested arrays of structures in grids_ggd and ggd substructures. 
Args: - edge_profiles: edge_profiles IDS object (either from IMASPy or AL HLI) + edge_profiles: edge_profiles IDS object (either from imas-python or AL HLI) times: time values to fill """ edge_profiles.ids_properties.homogeneous_time = ( - imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS + imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS ) - edge_profiles.ids_properties.comment = "Generated for IMASPy benchmark suite" + edge_profiles.ids_properties.comment = "Generated for imas-python benchmark suite" edge_profiles.ids_properties.creation_date = datetime.date.today().isoformat() - edge_profiles.code.name = "IMASPy ASV benchmark" - edge_profiles.code.version = imaspy.__version__ + edge_profiles.code.name = "imas-python ASV benchmark" + edge_profiles.code.version = imas.__version__ edge_profiles.code.repository = ( - "https://git.iter.org/projects/IMAS/repos/imaspy/browse" + "https://github.com/iterorganization/imas-python" ) # This GGD grid is not a valid description, but it's a good stress test for the @@ -46,7 +46,13 @@ def fill_ggd(edge_profiles, times): grid.space[i].identifier.index = 1 grid.space[i].identifier.description = "Description...." 
grid.space[i].geometry_type.index = 0 - grid.space[0].coordinates_type = np.array([4, 5], dtype=np.int32) + grid.space[0].coordinates_type.resize(1) + if imas.__version__ >= "4.0.0": + grid.space[0].coordinates_type = np.array([4, 5], dtype=np.int32) + else: + grid.space[0].coordinates_type[0].name = "coordinates type" + grid.space[0].coordinates_type[0].index = 0 + grid.space[0].coordinates_type[0].name = "example coordinates type" grid.space[0].objects_per_dimension.resize(3) # points, lines, surfaces points = grid.space[0].objects_per_dimension[0].object points.resize(N_POINTS) @@ -61,7 +67,13 @@ def fill_ggd(edge_profiles, times): for i in range(N_SURFACES): surfaces[i].nodes = np.random.randint(1, N_LINES + 1, 4, dtype=np.int32) - grid.space[1].coordinates_type = np.array([6], dtype=np.int32) + grid.space[1].coordinates_type.resize(1) + if imas.__version__ >= "4.0.0": + grid.space[1].coordinates_type = np.array([6], dtype=np.int32) + else: + grid.space[1].coordinates_type[0].name = "coordinates type" + grid.space[1].coordinates_type[0].index = 0 + grid.space[1].coordinates_type[0].name = "example coordinates type" grid.space[1].objects_per_dimension.resize(2) obp = grid.space[1].objects_per_dimension[0] obp.object.resize(2) diff --git a/benchmarks/technical.py b/benchmarks/technical.py index 59072d48..1bbf3a48 100644 --- a/benchmarks/technical.py +++ b/benchmarks/technical.py @@ -1,18 +1,11 @@ -import imaspy import imas -def timeraw_create_default_imaspy_factory(): +def timeraw_create_default_imas_factory(): # timeraw to ensure that nothing is cached return """ - import imaspy - imaspy.IDSFactory() - """ - - -def timeraw_import_imaspy(): - return """ - import imaspy + import imas + imas.IDSFactory() """ @@ -38,6 +31,6 @@ def track_imas_versions(): ) -def track_imaspy_dd_version(): - return imaspy.IDSFactory().version +def track_imas_dd_version(): + return imas.IDSFactory().version """ diff --git a/benchmarks/utils.py b/benchmarks/utils.py index 
5a8beeb8..0d2a9958 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -3,12 +3,8 @@ import uuid from pathlib import Path -import imaspy -import imaspy.exception - -# Don't directly import imas: code analyzers break on the huge code base -imas = importlib.import_module("imas") - +import imas +import imas.exception # Backend constants HDF5 = "HDF5" @@ -28,11 +24,11 @@ def backend_exists(backend): """Tries to detect if the lowlevel has support for the given backend.""" uri = create_uri(backend, str(uuid.uuid4())) try: - entry = imaspy.DBEntry(uri, "r") + entry = imas.DBEntry(uri, "r") except Exception as exc: if "backend is not available" in str(exc): return False - elif isinstance(exc, (imaspy.exception.ALException, FileNotFoundError)): + elif isinstance(exc, (imas.exception.ALException, FileNotFoundError)): return True return True # Highly unlikely, but it could succeed without error @@ -60,32 +56,27 @@ def backend_exists(backend): backend for backend in available_backends if backend not in [ASCII, NETCDF] ] -hlis = ["imas", "imaspy"] +hlis = ["imas"] DBEntry = { "imas": imas.DBEntry, - "imaspy": imaspy.DBEntry, } factory = { - "imas": imas, - "imaspy": imaspy.IDSFactory(), + "imas": imas.IDSFactory(), } -available_serializers = [imaspy.ids_defs.ASCII_SERIALIZER_PROTOCOL] +available_serializers = [imas.ids_defs.ASCII_SERIALIZER_PROTOCOL] def create_dbentry(hli, backend): if backend == NETCDF: - if hli == "imas": - # Raising NotImplementedError will skip the benchmarks for this combination - raise NotImplementedError("AL-Python HLI doesn't implement netCDF.") - if hli == "imaspy": # check if netcdf backend is available + if hli == "imas": # check if netcdf backend is available try: assert ( - imaspy.DBEntry._select_implementation("x.nc").__name__ + imas.DBEntry._select_implementation("x.nc").__name__ == "NCDBEntryImpl" ) except (AttributeError, AssertionError): raise NotImplementedError( - "This version of IMASPy doesn't implement netCDF." 
+ "This version of imas-python doesn't implement netCDF." ) from None path = Path.cwd() / f"DB-{hli}-{backend}" diff --git a/ci/build_dd_zip.sh b/ci/build_dd_zip.sh index a4f14683..8f704d98 100755 --- a/ci/build_dd_zip.sh +++ b/ci/build_dd_zip.sh @@ -17,4 +17,4 @@ echo "Done loading modules" set -x # Build the DD zip -python imaspy/dd_helpers.py +python imas/dd_helpers.py diff --git a/ci/build_docs_and_dist.sh b/ci/build_docs_and_dist.sh index 2ef4c12a..a83ed031 100755 --- a/ci/build_docs_and_dist.sh +++ b/ci/build_docs_and_dist.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imaspy and run all tests +# Bamboo CI script to install imas and run all tests # Note: this script should be run from the root of the git repository # Debuggging: @@ -27,7 +27,7 @@ pip install --upgrade pip setuptools wheel build rm -rf dist python -m build . -# Install imaspy and documentation dependencies from the just-built wheel +# Install imas and documentation dependencies from the just-built wheel pip install "`readlink -f dist/*.whl`[docs,netcdf]" # Debugging: diff --git a/ci/linting.sh b/ci/linting.sh index a66eacaf..415ad1bf 100755 --- a/ci/linting.sh +++ b/ci/linting.sh @@ -24,5 +24,5 @@ python -m venv venv # Install and run linters pip install --upgrade 'black >=24,<25' flake8 -black --check imaspy -flake8 imaspy +black --check imas +flake8 imas diff --git a/ci/run_benchmark.sh b/ci/run_benchmark.sh index 74783585..022804fd 100755 --- a/ci/run_benchmark.sh +++ b/ci/run_benchmark.sh @@ -1,16 +1,24 @@ #!/bin/bash -# Bamboo CI script to install imaspy and run all tests +# Bamboo CI script to install imas and run all tests # Note: this script should be run from the root of the git repository # Debuggging: -set -e -o pipefail + echo "Loading modules:" $@ +BENCHMARKS_DIR=$(realpath "$PWD/imas_benchmarks") +if [[ "$(uname -n)" == *"bamboo"* ]]; then + set -e -o pipefail + # create + BENCHMARKS_DIR=$(realpath "/mnt/bamboo_deploy/imas/benchmarks/") +fi # Set up environment 
such that module files can be loaded source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: -module load $@ +# IMAS-AL-Python/5.2.1-intel-2023b-DD-3.41.0 Saxon-HE/12.4-Java-21 +module load IMAS-AL-Core/5.4.3-intel-2023b Saxon-HE/12.4-Java-21 + # Debuggging: echo "Done loading modules" @@ -24,17 +32,17 @@ rm -rf venv # Environment should be clean, but remove directory to be sure python -m venv venv source venv/bin/activate -# Install asv and imaspy +# Install asv and imas pip install --upgrade pip setuptools wheel pip install virtualenv .[test] # Generate MDS+ models cache -python -c 'import imaspy.backends.imas_core.mdsplus_model; print(imaspy.backends.imas_core.mdsplus_model.mdsplus_model_dir(imaspy.IDSFactory()))' +python -c 'import imas.backends.imas_core.mdsplus_model; print(imas.backends.imas_core.mdsplus_model.mdsplus_model_dir(imas.IDSFactory()))' # Copy previous results (if any) -mkdir -p /mnt/bamboo_deploy/imaspy/benchmarks/results +mkdir -p "$BENCHMARKS_DIR/results" mkdir -p .asv -cp -rf /mnt/bamboo_deploy/imaspy/benchmarks/results .asv/ +cp -rf "$BENCHMARKS_DIR/results" .asv/ # Ensure numpy won't do multi-threading export OPENBLAS_NUM_THREADS=1 @@ -47,7 +55,7 @@ asv machine --yes # Run ASV for the current commit, develop and main asv run --skip-existing-successful HEAD^! asv run --skip-existing-successful develop^! -asv run --skip-existing-successful main^! +# asv run --skip-existing-successful main^! 
# Compare results if [ `git rev-parse --abbrev-ref HEAD` == develop ] @@ -61,5 +69,5 @@ fi asv publish # And persistently store them -cp -rf .asv/{results,html} /mnt/bamboo_deploy/imaspy/benchmarks/ +cp -rf .asv/{results,html} "$BENCHMARKS_DIR" diff --git a/ci/run_pytest.sh b/ci/run_pytest.sh index 9579e635..7b204bf8 100755 --- a/ci/run_pytest.sh +++ b/ci/run_pytest.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imaspy and run all tests +# Bamboo CI script to install imas and run all tests # Note: this script should be run from the root of the git repository # Debuggging: @@ -22,7 +22,7 @@ rm -rf venv # Environment should be clean, but remove directory to be sure python -m venv venv source venv/bin/activate -# Install imaspy and test dependencies +# Install imas and test dependencies pip install --upgrade pip setuptools wheel pip install .[h5py,netcdf,test] @@ -34,4 +34,4 @@ pip freeze rm -f junit.xml rm -rf htmlcov -python -m pytest -n=auto --cov=imaspy --cov-report=term-missing --cov-report=html --junit-xml=junit.xml +python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml diff --git a/conftest.py b/conftest.py index 20b26679..a7eb12b1 100644 --- a/conftest.py +++ b/conftest.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
# # Set up pytest: # - Backend parametrization (and corresponding command line options) @@ -22,19 +22,19 @@ import pytest from packaging.version import Version -from imaspy.backends.imas_core.imas_interface import has_imas as _has_imas -from imaspy.backends.imas_core.imas_interface import ll_interface, lowlevel -from imaspy.dd_zip import dd_etree, dd_xml_versions, latest_dd_version -from imaspy.ids_defs import ( +from imas.backends.imas_core.imas_interface import has_imas as _has_imas +from imas.backends.imas_core.imas_interface import ll_interface, lowlevel +from imas.dd_zip import dd_etree, dd_xml_versions, latest_dd_version +from imas.ids_defs import ( ASCII_BACKEND, HDF5_BACKEND, IDS_TIME_MODE_INDEPENDENT, MDSPLUS_BACKEND, MEMORY_BACKEND, ) -from imaspy.ids_factory import IDSFactory +from imas.ids_factory import IDSFactory -logger = logging.getLogger("imaspy") +logger = logging.getLogger("imas") logger.setLevel(logging.INFO) os.environ["IMAS_AL_DISABLE_VALIDATE"] = "1" @@ -136,33 +136,33 @@ def latest_factory3(): # Fixtures for various assets @pytest.fixture() -def imaspy_assets(): - return files("imaspy") / "assets" +def imas_assets(): + return files("imas") / "assets" @pytest.fixture() -def fake_toplevel_xml(imaspy_assets): - return imaspy_assets / "IDS_fake_toplevel.xml" +def fake_toplevel_xml(imas_assets): + return imas_assets / "IDS_fake_toplevel.xml" @pytest.fixture() -def ids_minimal(imaspy_assets): - return imaspy_assets / "IDS_minimal.xml" +def ids_minimal(imas_assets): + return imas_assets / "IDS_minimal.xml" @pytest.fixture() -def ids_minimal2(imaspy_assets): - return imaspy_assets / "IDS_minimal_2.xml" +def ids_minimal2(imas_assets): + return imas_assets / "IDS_minimal_2.xml" @pytest.fixture() -def ids_minimal_struct_array(imaspy_assets): - return imaspy_assets / "IDS_minimal_struct_array.xml" +def ids_minimal_struct_array(imas_assets): + return imas_assets / "IDS_minimal_struct_array.xml" @pytest.fixture() -def ids_minimal_types(imaspy_assets): - 
return imaspy_assets / "IDS_minimal_types.xml" +def ids_minimal_types(imas_assets): + return imas_assets / "IDS_minimal_types.xml" @pytest.fixture diff --git a/docs/Makefile b/docs/Makefile index 6f98ead9..f0c27f01 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -29,7 +29,7 @@ clean: Makefile # This recipe generates source files, so put result in source # Seems to overwrite autosummary documentation though! So not using this rn -MODULE_EXCLUDE="../imaspy/examples/**" "../imas/**" +MODULE_EXCLUDE="../imas/examples/**" "../imas/**" apidocs: Makefile sphinx-apidoc --implicit-namespaces -o "$(GENERATEDDIR)" "$(PROJECT_ROOT)/imaspy/" $(MODULE_EXCLUDE) diff --git a/docs/source/_static/imaspy.css b/docs/source/_static/imas.css similarity index 100% rename from docs/source/_static/imaspy.css rename to docs/source/_static/imas.css diff --git a/docs/source/_static/imaspy_200x200.png b/docs/source/_static/imas_200x200.png similarity index 100% rename from docs/source/_static/imaspy_200x200.png rename to docs/source/_static/imas_200x200.png diff --git a/docs/source/api-hidden.rst b/docs/source/api-hidden.rst index 41595482..a437b34f 100644 --- a/docs/source/api-hidden.rst +++ b/docs/source/api-hidden.rst @@ -11,4 +11,4 @@ API autosummary :recursive: :template: custom-module-template.rst - imaspy + imas diff --git a/docs/source/api.rst b/docs/source/api.rst index 143ee1fe..87be0471 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -1,16 +1,16 @@ -.. currentmodule:: imaspy +.. currentmodule:: imas API reference ============= -This page provides an auto-generated summary of IMASPy's API. For more details +This page provides an auto-generated summary of imas-python's API. For more details and examples, refer to the relevant chapters in the main part of the documentation. -IMASPy IDS manipulation -------------------------- +imas-python IDS manipulation +---------------------------- -.. currentmodule:: imaspy +.. currentmodule:: imas .. 
autosummary:: diff --git a/docs/source/benchmarking.rst b/docs/source/benchmarking.rst index 4588b173..ce7b0090 100644 --- a/docs/source/benchmarking.rst +++ b/docs/source/benchmarking.rst @@ -1,33 +1,33 @@ -.. _`benchmarking IMASPY`: +.. _`benchmarking IMAS`: -Benchmarking IMASPy -=================== +Benchmarking imas-python +======================== -IMASPy integrates with the `airspeed velocity +imas-python integrates with the `airspeed velocity `_ ``asv`` package for benchmarking. -IMASPy benchmarks ------------------ +imas-python benchmarks +---------------------- -IMASPy benchmarks are stored in the ``benchmarks`` folder in the git repository. We can +imas-python benchmarks are stored in the ``benchmarks`` folder in the git repository. We can currently distinguish three types of benchmarks: Technical benchmarks These are for benchmarking features not directly connected to user-interfacing - functionality. For example benchmarking the time it takes to import the imaspy + functionality. For example benchmarking the time it takes to import the imas package. Basic functional benchmarks These are for benchmarking functionality with an equivalent feature in the IMAS - Access Layer HLI. In addition to tracking the performance of the IMASPy features + Access Layer HLI. In addition to tracking the performance of the imas-python features over time, we can also benchmark the performance against the traditional HLI. For example: putting and getting IDSs. -IMASPy-specific functional benchmarks +imas-python-specific functional benchmarks These are for benchmarking functionality without an equivalent feature in the IMAS - Access Layer HLI. We use these for tracking the IMASPy performance over time. + Access Layer HLI. We use these for tracking the imas-python performance over time. For example: data conversion between DD versions. 
@@ -35,7 +35,7 @@ IMASPy-specific functional benchmarks Running benchmarks (quick) -------------------------- -When you have an existing IMASPy installation, you can run the benchmarks like this: +When you have an existing imas-python installation, you can run the benchmarks like this: .. code-block:: console @@ -67,12 +67,12 @@ contains tabular results. Some examples: hli -------- ------------ imas 22.9±0.4μs - imaspy 408±8μs + imas 408±8μs ======== ============ Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles`` was repeated for multiple values of ``hli``: once for the ``imas`` HLI, and once for the -``imaspy`` HLI. +``imas`` HLI. Some benchmarks are parametrized in multiple dimensions, as in below example. This results in a 2D table of results. @@ -87,7 +87,7 @@ results in a 2D table of results. hli 13 14 11 ======== ========== ============ ========= imas 75.1±1ms 70.2±0.5ms 207±2ms - imaspy 241±4ms 229±2ms 364±6ms + imas 241±4ms 229±2ms 364±6ms ======== ========== ============ ========= .. note:: @@ -103,8 +103,8 @@ Running benchmarks (advanced) ----------------------------- Running benchmarks quickly, as explained in the previous section, is great during -development and for comparing the performance of IMASPy against the imas HLI. However, -``asv`` can also track the performance of benchmarks over various commits of IMASPy. +development and for comparing the performance of imas-python against the imas HLI. However, +``asv`` can also track the performance of benchmarks over various commits of imas-python. Unfortunately this is a bit more tricky to set up. @@ -112,7 +112,7 @@ Setup advanced benchmarking ''''''''''''''''''''''''''' First, some background on how ``asv`` tracks performance: it creates an isolated virtual -environment (using the ``virtualenv`` package) and installs IMASPy for each commit that +environment (using the ``virtualenv`` package) and installs imas-python for each commit that will be benchmarked. 
However, because the virtual environment is isolated, the ``imas`` package won't be available. We need to work around it by setting the environment variable ``ASV_PYTHONPATH``: @@ -125,8 +125,8 @@ variable ``ASV_PYTHONPATH``: .. caution:: - ``imaspy`` must not be available on the ``ASV_PYTHONPATH`` to avoid the interfering - of two imaspy modules (one on the ``PYTHONPATH``, and the other installed by ``asv`` + ``imas`` must not be available on the ``ASV_PYTHONPATH`` to avoid the interfering + of two imas modules (one on the ``PYTHONPATH``, and the other installed by ``asv`` in the virtual environment). @@ -171,7 +171,7 @@ Instead, you can submit a benchmark job to the compute nodes. #!/bin/bash # Set SLURM options: - #SBATCH --job-name=IMASPy-benchmark + #SBATCH --job-name=imas-python-benchmark #SBATCH --time=1:00:00 #SBATCH --partition=gen10_ib # Note: for proper benchmarking we need to exclusively reserve a node, even though @@ -199,7 +199,7 @@ Instead, you can submit a benchmark job to the compute nodes. echo # Activate the virtual environment which has asv installed - . venv_imaspy/bin/activate + . venv_imas/bin/activate # Setup asv machine (using default values) asv machine --yes diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 2601639a..d3a4ef93 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -3,8 +3,8 @@ Changelog ========= -What's new in IMASPy 1.1.1 --------------------------- +What's new in imas-python 1.1.1 +------------------------------- This is a small release that mainly fixes issues related to the recent Data Dictionary 4.0.0 release. @@ -19,7 +19,7 @@ Bug fixes Dictionary 4.0.0 and 3.42.0. In other cases, the Data Dictionary version is now explicitly indicated. -- :issue:`IMAS-5560`: Fix a bug where IMASPy would not correctly recognize that +- :issue:`IMAS-5560`: Fix a bug where imas-python would not correctly recognize that the UDA backend is used. 
- :issue:`IMAS-5541`: Fix a bug when converting a closed contour to Data Dictionary version 4.0.0. @@ -29,15 +29,15 @@ Bug fixes recent Data Dictionary version than the on-disk data was stored with. -What's new in IMASPy 1.1 ------------------------- +What's new in imas-python 1.1 +----------------------------- New features '''''''''''' - :ref:`1.1/improved performance`. - :ref:`1.1/improved conversion`. -- IMASPy 1.1 adds support for Identifiers defined by the Data Dictionary. This +- imas-python 1.1 adds support for Identifiers defined by the Data Dictionary. This functionality is described in detail in :ref:`Identifiers`. - Support for the new :py:const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` that is @@ -52,7 +52,7 @@ New features netCDF file, which can be used for sharing and/or archiving data. This feature is in `preview` status, meaning that it may change in upcoming - minor releases of IMASPy. + minor releases of imas-python. - Additional utility functions in :py:mod:`imaspy.util`: @@ -71,18 +71,18 @@ New features - :py:func:`imaspy.util.get_data_dictionary_version` returns the Data Dictionary version for which an IDS was created. -- Add support for IMAS Access Layer Core 5.2 and later. IMASPy can now be used +- Add support for IMAS Access Layer Core 5.2 and later. imas-python can now be used with just the Access Layer Core package available, the full AL-Python HLI is no longer required. Since the Access Layer Core is now installable with ``pip`` as well (requires access to the git repository on - ``__), you can install - ``imaspy`` and ``imas_core`` in one go with: + ``__), you can install + ``imas`` and ``imas_core`` in one go with: .. code-block:: bash - pip install 'imaspy[imas-core] @ git+ssh://git@git.iter.org/imas/imaspy.git' + pip install 'imas[imas-core] @ git+ssh://git@github.com/iterorganization/imas-core.git' - A diff tool for IDSs: :py:func:`imaspy.util.idsdiff`. 
- Implement ``==`` equality checking for IDS Structures and Arrays of Structures @@ -91,16 +91,16 @@ New features backend. During a :py:meth:`~imaspy.db_entry.DBEntry.get` or - :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, IMASPy first reads the version + :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, imas-python first reads the version of the Data Dictionary that was used to store the IDS. When this version is - not known to IMASPy, an error is raised. This error can now be ignored by + not known to imas-python, an error is raised. This error can now be ignored by setting the parameter :py:param:`~imaspy.db_entry.DBEntry.get.ignore_unknown_dd_version` to - ``True``, and IMASPy will do its best to load the data anyway. + ``True``, and imas-python will do its best to load the data anyway. - A new command line tool exists for analyzing which Data Dictionary fields are used in provided Data Entries. This tool is explained in detail in - :ref:`IMASPy Data Entry analysis`. + :ref:`imas-python Data Entry analysis`. - Various improvements to the documentation were made. @@ -110,7 +110,7 @@ Breaking changes .. note:: - We attempt to keep the public API of IMASPy stable with minor releases. The + We attempt to keep the public API of imas-python stable with minor releases. The following breaking change is the result of an upgrade of the IMAS Access Layer. - Starting with Access Layer 5.2 or newer, the Access Layer will raise @@ -121,8 +121,8 @@ Breaking changes You may need to update the :py:class:`Exception` classes in ``try/except`` blocks to the new Exception classes raised by ``imas_core``. - When using an older version of the Access Layer, the behaviour of IMASPy is no - different than in IMASPy 1.0. + When using an older version of the Access Layer, the behaviour of imas-python is no + different than in imas-python 1.0. 
Bug fixes @@ -136,10 +136,10 @@ Bug fixes - Fixed a bug with :py:func:`~imaspy.ids_toplevel.IDSToplevel.serialize` when the IDS is in a non-default Data Dictionary version. - Fixed a bug when assigning ``nan`` to a FLT_0D, which would lead to a - confusing and incorrect log message in IMASPy 1.0. -- Fixed incorrect oldest supported DD version. Previously IMASPy indicated that + confusing and incorrect log message in imas-python 1.0. +- Fixed incorrect oldest supported DD version. Previously imas-python indicated that DD ``3.21.1`` was supported, however ``3.22.0`` is the oldest Data Dictionary - tested (and provided) with IMASPy. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` + tested (and provided) with imas-python. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` has been updated to reflect this. - Fixed a bug when using numpy functions, such as :external:py:func:`numpy.isclose` on scalar numbers. Previously an error was @@ -158,11 +158,11 @@ Improved performance '''''''''''''''''''' - Improved performance of :py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate`. -- Improved creation of IMASPy IDS objects. This made filling IDSs and loading +- Improved creation of imas-python IDS objects. This made filling IDSs and loading them with :py:meth:`~imaspy.db_entry.DBEntry.get` / :py:meth:`~imaspy.db_entry.DBEntry.get_slice` 10-20% faster. - Improved the performance of lazy loading. This is most noticeable with the - ``HDF5`` backend, which is now up to 40x faster than with IMASPy 1.0. + ``HDF5`` backend, which is now up to 40x faster than with imas-python 1.0. - Improved the performance of :py:meth:`~imaspy.db_entry.DBEntry.get` / :py:meth:`~imaspy.db_entry.DBEntry.get_slice` / :py:meth:`~imaspy.db_entry.DBEntry.put` / @@ -180,9 +180,9 @@ Converting IDSs between Data Dictionary versions has several improvements for recent DD versions. Further details on IDS conversion can be found in :ref:`Conversion of IDSs between DD versions`. 
-- The IMASPy Command Line Interface for converting Data Entries between different +- The imas-python Command Line Interface for converting Data Entries between different versions of the Data Dictionary has been improved. See :ref:`Command line tool - reference` or execute ``imaspy convert --help`` in a shell for further + reference` or execute ``imas convert --help`` in a shell for further details. - Add support for multiple renames in an IDS' path. @@ -190,7 +190,7 @@ recent DD versions. Further details on IDS conversion can be found in For example, in the ``pulse_schedule`` IDS, the node ``ec/beam/power_launched/reference`` in Data Dictionary ``3.40.0`` was renamed from ``ec/launcher/power/reference/data`` in Data Dictionary ``3.39.0``. This - use case is now supported by IMASPy. + use case is now supported by imas-python. - Automatically convert data between 0D and 1D when possible (`IMAS-5170 `__). diff --git a/docs/source/ci_config.rst b/docs/source/ci_config.rst index ced4f52a..2fd284f4 100644 --- a/docs/source/ci_config.rst +++ b/docs/source/ci_config.rst @@ -3,25 +3,25 @@ CI configuration ================ -IMASPy uses `ITER Bamboo `_ for CI. This page provides an overview +imas-python uses `ITER Bamboo `_ for CI. This page provides an overview of the CI Plan and deployment projects. CI Plan ------- -The `IMASPy CI plan `_ consists of 4 types of jobs: +The `imas-python CI plan `_ consists of 4 types of jobs: Linting and DD ZIP This job is responsible for three things: 1. Verify that the ``IDSDef2MDSplusPreTree.xsl`` file matches the one in the Access Layer repository. This file is required for building MDSplus models and the - models built by IMASPy should match those built by the Access Layer. - 2. Linting: run ``black`` and ``flake8`` on the IMASPy code base. See :ref:`code + models built by imas-python should match those built by the Access Layer. + 2. Linting: run ``black`` and ``flake8`` on the imas-python code base. See :ref:`code style and linting`. 
3. Build the Data Dictionary zip file. This Task builds the Data Dictionary for all tagged releases since DD version ``3.22.0``. These are combined into the - ``IDSDef.zip`` file, which is distributed with IMASPy. + ``IDSDef.zip`` file, which is distributed with imas-python. The ZIP file is built in a separate job, such that the subsequent test jobs can reuse this. @@ -32,11 +32,9 @@ Linting and DD ZIP - ``ci/build_dd_zip.sh`` Test with AL - This runs all unit tests with pytest. There are multiple (at the time of writing 3) - Access Layer versions that we test against: AL4.11.7 (from SDCC module - ``IMAS/3.39.0-4.11.7-intel-2020b``), AL5.0.0 (from SDCC module - ``IMAS/3.39.0-5.0.0-intel-2020b``) and AL5.1.0 (from SDCC module - ``IMAS/3.40.0-5.1.0-intel-2020b``). + This runs all unit tests with pytest. + Access Layer version that we test against: + IMAS-AL-Core/5.4.3-intel-2023b The CI script executed in this job is ``ci/run_pytest.sh``, which expects the modules it needs to load as arguments. @@ -51,7 +49,7 @@ Test with AL 5. In the "Script" Task, update the module(s) in the Argument field Benchmark - This job runs the :ref:`ASV benchmarks ` on the CI server. It + This job runs the :ref:`ASV benchmarks ` on the CI server. It is configured such that it can only run on a single CI agent (`io-ls-bamboowk6.iter.org`). There are two reasons for this: @@ -64,7 +62,7 @@ Benchmark The CI script executed in this job is: ``ci/run_benchmark.sh``. Build docs and dists - This job builds the Sphinx documentation and python packages for IMASPy (``sdist`` + This job builds the Sphinx documentation and python packages for imas-python (``sdist`` and ``wheel``). The CI script executed in this job is: ``ci/build_docs_and_dist.sh``. 
@@ -73,18 +71,14 @@ Build docs and dists Deployment projects ------------------- -There are two Bamboo deployment projects for IMASPy: +There is a GitHub workflow for imas-python: -`Deploy IMASPy-doc `_ - Deploy the documentation created in the `Build docs and dists` job to `Sharepoint - `_. +`imas-python-PyPi `_ + Deploy the python packages job to the https://pypi.org/ server and https://test.pypi.org/ server. + You can find the link here: `imas-python `_ - This deployment project runs for after each successful CI build of the IMASPy main - branch. -`IMASPy-PyPi `_ - Deploy the python packages created in the `Build docs and dists` job to the - https://pypi.iter.org/ server. +`Deploy imas-python-doc `_ + Deploy the documentation using `readthedocs + `_. - This deployment project runs for after each successful CI build of the IMASPy main - branch. diff --git a/docs/source/cli.rst b/docs/source/cli.rst index 61d8251e..0fa3819a 100644 --- a/docs/source/cli.rst +++ b/docs/source/cli.rst @@ -1,32 +1,32 @@ -.. _`IMASPy Command Line tool`: +.. _`imas-python Command Line tool`: -IMASPy Command Line tool -======================== +imas-python Command Line tool +============================= -IMASPy comes with a command line tool: ``imaspy``. This allows you to execute +imas-python comes with a command line tool: ``imas``. This allows you to execute some tasks without writing Python code: -- ``imaspy convert`` can convert Data Entries (or, optionally, single IDSs from +- ``imas convert`` can convert Data Entries (or, optionally, single IDSs from a Data Entry) to a different DD version. This command can also be used to convert IDSs between different backends. -- ``imaspy print`` can print the contents of an IDS to the terminal. 
+- ``imas version`` shows version information of imas-python. +- ``imas analyze-db`` and ``imas process-db-analysis`` analyze the contents of one or more Data Entries (stored in the HDF5 backend format). This tool is - explained in more detail :ref:`below `. + explained in more detail :ref:`below `. You can get further details, including the expected command line arguments and options, by running any tool with the ``--help`` flag. This help is also available in the :ref:`Command line tool reference` below. -.. _`IMASPy Data Entry analysis`: +.. _`imas-python Data Entry analysis`: -IMASPy Data Entry analysis --------------------------- +imas-python Data Entry analysis +------------------------------- -The IMASPy Data Entry analysis tool is a set of two command line programs: -``imaspy analyze-db`` and ``imaspy process-db-analysis``. The tool analyzes the +The imas-python Data Entry analysis tool is a set of two command line programs: +``imas analyze-db`` and ``imas process-db-analysis``. The tool analyzes the files from the HDF5 backend to figure out which IDSs are stored in the Data Entry, and which fields from the Data Dictionary have any data stored. This provides statistical data that is useful for Data Dictionary maintenance: by @@ -37,12 +37,12 @@ adding, changing or removing data fields. Usage ''''' -The ``imaspy analyze-db`` is run first. Its output is then used by ``imaspy +The ``imas analyze-db`` is run first. Its output is then used by ``imas process-db-analysis`` to provide statistics on the collected data. -.. rubric:: ``imaspy analyze-db`` +.. rubric:: ``imas analyze-db`` -``imaspy analyze-db`` analyzes Data Entries. You need to provide one or more +``imas analyze-db`` analyzes Data Entries. You need to provide one or more paths to folders where HDF5-backend IMAS data is stored. .. note:: @@ -59,61 +59,61 @@ paths to folders where HDF5-backend IMAS data is stored. ``/public/imasdb/////`` folder, where ```` is typically ``/home/``. 
-The tool collects a small amount of metadata (see the output of ``imaspy +The tool collects a small amount of metadata (see the output of ``imas analyze-db --help`` for an overview) on top of the filled fields of IDSs. All data (the metadata, and usage data of the provided Data Entries) is stored in a `gzipped `__ `JSON `__ file. -By default this is output in ``imaspy-db-analysis.json.gz`` in the current +By default this is output in ``imas-db-analysis.json.gz`` in the current working directory, but this can be customized with the ``--output/-o`` option. If the output file already exists, the existing data is retained and the additional analysis data is *appended* to the file. .. code-block:: bash - :caption: Example usage of ``imaspy analyze-db`` + :caption: Example usage of ``imas analyze-db`` - # Analyze a single data entry, output to the default imaspy-db-analysis.json.gz - imaspy analyze-db /work/imas/shared/imasdb/iter_scenarios/3/106015/1/ + # Analyze a single data entry, output to the default imas-db-analysis.json.gz + imas analyze-db /work/imas/shared/imasdb/iter_scenarios/3/106015/1/ # Analyze a single data entry, provide a custom output filename - imaspy analyze-db ./test/dataset/ -o test-dataset-analysis.json.gz + imas analyze-db ./test/dataset/ -o test-dataset-analysis.json.gz # Analyze multiple data entries, use shell globbing to select all runs - imaspy analyze-db /work/imas/shared/imasdb/iter_scenarios/3/150601/*/ + imas analyze-db /work/imas/shared/imasdb/iter_scenarios/3/150601/*/ # Analyze **all** HDF5 Data Entries inside a folder # 1. Find all HDF5 Data Entries (by locating their master.h5 files) # in the ~/public/imasdb/ folder # 2. Get the directory names for each of these files - # 3. Pass the directories to imaspy analyze-db + # 3. Pass the directories to imas analyze-db find ~/public/imasdb/ -name master.h5 | \ xargs dirname | \ - xargs imaspy analyze-db + xargs imas analyze-db .. 
note:: - ``imaspy analyze-db`` only works with the HDF5 backend, because the data files + ``imas analyze-db`` only works with the HDF5 backend, because the data files stored by this backend allow for a fast way to check which fields in an IDS are filled. We use the `h5py `__ Python module, which needs to be available to run the tool. An error message instructing to install / activate ``h5py`` is provided when ``h5py`` cannot be loaded. - If your data is stored in another backend than HDF5, you can use ``imaspy + If your data is stored in another backend than HDF5, you can use ``imas convert`` to convert the data to the HDF5 backend. For example: .. code-block:: bash - imaspy convert \ - imas:mdsplus?path=/path/to/mdsplus/data 3.41.0 imas:hdf5?path=/tmp/imaspy-analysis + imas convert \ + imas:mdsplus?path=/path/to/mdsplus/data 3.41.0 imas:hdf5?path=/tmp/imas-analysis -.. rubric:: ``imaspy process-db-analysis`` +.. rubric:: ``imas process-db-analysis`` -Once you have one or more output files from ``imaspy analyze-db``, you can -process these files with ``imaspy process-db-analysis``. This will: +Once you have one or more output files from ``imas analyze-db``, you can +process these files with ``imas process-db-analysis``. This will: 1. Load all analysis results from the provided files, and compare this against the available fields in :ref:`The default Data Dictionary version` (which @@ -139,13 +139,13 @@ process these files with ``imaspy process-db-analysis``. This will: - Enter End Of File: *Ctrl+D*. .. code-block:: bash - :caption: Example usage for ``imaspy process-db-analysis`` + :caption: Example usage for ``imas process-db-analysis`` # Process a single analysis output - imaspy process-db-analysis imaspy-db-analysis.json.gz + imas process-db-analysis imas-db-analysis.json.gz # Process multiple outputs - imaspy process-db-anlysis workflow-1.json.gz workflow-2.json.gz + imas process-db-anlysis workflow-1.json.gz workflow-2.json.gz .. 
[#data_fields] Data fields are all fields in an IDS that can contain data. Structures and Arrays of Structures are not included. All data types @@ -158,7 +158,7 @@ process these files with ``imaspy process-db-analysis``. This will: Command line tool reference --------------------------- -.. click:: imaspy.command.cli:cli - :prog: imaspy +.. click:: imas.command.cli:cli + :prog: imas :nested: full \ No newline at end of file diff --git a/docs/source/code_style.rst b/docs/source/code_style.rst index 7729a4d7..e7f3913c 100644 --- a/docs/source/code_style.rst +++ b/docs/source/code_style.rst @@ -7,7 +7,7 @@ Code style and linting Code style ---------- -IMASPy follows `The Black Code Style +imas-python follows `The Black Code Style `_. All Python files should be formatted with the ``black`` command line tool (this is checked in :ref:`CI `). @@ -40,7 +40,7 @@ with pre-commit hooks): .. code-block:: console - $ black imaspy + $ black imas All done! ✨ 🍰 ✨ 66 files left unchanged. @@ -48,8 +48,8 @@ with pre-commit hooks): Linting ------- -IMASPy uses `flake8 `_ for linting (static code -analysis). Flake8 should not report any violations when running it on the ``imaspy`` +imas-python uses `flake8 `_ for linting (static code +analysis). Flake8 should not report any violations when running it on the ``imas`` code base. Again, this is checked in CI. In some exceptions we can ignore a violation. For example, if a violation cannot be @@ -74,5 +74,5 @@ your code introduces any violations: .. 
code-block:: console - $ flake8 imaspy + $ flake8 imas diff --git a/docs/source/conf.py b/docs/source/conf.py index 53b3ad77..d6e32651 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -17,44 +17,41 @@ from jinja2.defaults import DEFAULT_FILTERS from packaging.version import Version -import imaspy +import imas print("python exec:", sys.executable) print("sys.path:", sys.path) # -- Project information ----------------------------------------------------- # The documented project’s name -project = src_project = PROJECT = "IMASPy" -PACKAGE = "imaspy" -src_group = GROUP = "IMAS" +project = src_project = PROJECT = "imas-python" +PACKAGE = "imas" +GROUP = "IMAS" # A copyright statement in the style '2008, Author Name'. copyright = f"2020-{datetime.datetime.now().year}, ITER Organization" # The author name(s) of the document author = "ITER Organization" -src_host = "git.iter.org" +src_host = "https://github.com/iterorganization/" # Parse urls here for convenience, to be re-used - # ITER docs -iter_projects = "https://git.iter.org/projects/" -imas_repos = urljoin(iter_projects, "IMAS/") -imex_repos = urljoin(iter_projects, "IMEX/") -dd_url = urljoin(imas_repos, "repos/data-dictionary/") -al_url = urljoin(imas_repos, "repos/access-layer/") -issue_url = jira_url = "https://jira.iter.org/browse/" +iter_projects = "https://github.com/iterorganization/" +dd_url = urljoin(iter_projects, "imas-data-dictionary/") +al_url = urljoin(iter_projects, "imas-core/") +issue_url = jira_url = "https://github.com/iterorganization/imas-python/issues" -# IMASPy -repository_url = f"{iter_projects}/{src_group}/repos/{src_project}/" -blob_url = urljoin(repository_url, "browse/") -mr_url = urljoin(repository_url, "/pull-requests") +# imas-python +repository_url = f"{iter_projects}/{src_project}/" +blob_url = repository_url +mr_url = urljoin(repository_url, "/pulls") # Configuration of sphinx.ext.extlinks # See https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html # unique 
name: (base URL, label prefix) extlinks = { - "src": (blob_url + "%s", f"{src_group}/{src_project}/%s"), + "src": (blob_url + "%s", "%s"), "issue": (issue_url + "%s", "%s"), "merge": (mr_url + "%s", "!%s"), "dd": (dd_url + "%s", "%s"), @@ -62,7 +59,7 @@ "pypa": ("https://packaging.python.org/%s", None), } -full_version = Version(imaspy.__version__) +full_version = Version(imas.__version__) # version: The major project version, used as the replacement for |version|. # For example, for the Python documentation, this may be something like 2.6. @@ -137,10 +134,10 @@ # and # https://sphinx-immaterial.readthedocs.io/en/latest/customization.html#confval-html_theme_options html_theme_options = { - "repo_url": "https://git.iter.org/projects/IMAS/repos/imaspy", - "repo_name": "IMASPy", + "repo_url": "https://github.com/iterorganization/imas-python", + "repo_name": "imas-python", "icon": { - "repo": "fontawesome/brands/bitbucket", + "repo": "fontawesome/brands/github", }, "features": [ # "navigation.expand", @@ -202,7 +199,7 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = "_static/imaspy_200x200.png" +html_logo = "_static/imas_200x200.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 @@ -261,7 +258,7 @@ # html_file_suffix = None # Output file base name for HTML help builder. 
-htmlhelp_basename = "imaspy_doc" +htmlhelp_basename = "imas_doc" # -- Extension configuration ------------------------------------------------- @@ -356,7 +353,7 @@ def sphinx_click_process_usage(app, ctx, lines): def setup(app): DEFAULT_FILTERS["escape_underscores"] = escape_underscores - app.add_css_file("imaspy.css") + app.add_css_file("imas.css") # Customize output of sphinx-click app.connect("sphinx-click-process-arguments", sphinx_click_process_arguments) app.connect("sphinx-click-process-description", sphinx_click_process_description) diff --git a/docs/source/configuring.rst b/docs/source/configuring.rst index dae11b6f..388ac813 100644 --- a/docs/source/configuring.rst +++ b/docs/source/configuring.rst @@ -1,7 +1,7 @@ -Configuring IMASPy -================== +Configuring imas-python +======================= -IMASPy has a couple of environment variables that can be used to control its behaviour. +imas-python has a couple of environment variables that can be used to control its behaviour. This page provides an overview of available variables. .. note:: @@ -12,25 +12,25 @@ This page provides an overview of available variables. `_ -``IMASPY_LOGLEVEL`` - Sets the log level used by the IMASPy logger. +``IMAS_LOGLEVEL`` + Sets the log level used by the imas-python logger. By default (when this environment variable is not set), all log messages of ``INFO`` or more severe are logged. You may set this to, for example, - ``IMASPY_LOGLEVEL=WARNING``, to suppress some of the log messages. + ``IMAS_LOGLEVEL=WARNING``, to suppress some of the log messages. See the Python documentation for the :external:py:mod:`logging` module which log levels are available. .. note:: - This environment variable is read when the ``imaspy`` library is initialized - during the first ``import imaspy``. Changing it afterwards has no effect, but - you can use :external:py:meth:`logging.getLogger("imaspy").setLevel(...) 
+ This environment variable is read when the ``imas`` library is initialized + during the first ``import imas``. Changing it afterwards has no effect, but + you can use :external:py:meth:`logging.getLogger("imas").setLevel(...) ` to change the log level programmatically. -``IMASPY_DISABLE_NC_VALIDATE`` +``IMAS_DISABLE_NC_VALIDATE`` Disables validation of netCDF files when loading an IDS from an IMAS netCDF file. .. caution:: @@ -44,16 +44,16 @@ Environment variables shared with the IMAS Python HLI ----------------------------------------------------- ``IMAS_AL_DISABLE_VALIDATE`` - By default, IMASPy :ref:`validates ` IDSs to check that all data is - consistent with their coordinates during a :py:meth:`~imaspy.db_entry.DBEntry.put` - or :py:meth:`~imaspy.db_entry.DBEntry.put_slice`. + By default, imas-python :ref:`validates ` IDSs to check that all data is + consistent with their coordinates during a :py:meth:`~imas.db_entry.DBEntry.put` + or :py:meth:`~imas.db_entry.DBEntry.put_slice`. Setting ``IMAS_AL_DISABLE_VALIDATE=1`` disables this validation. ``IMAS_AL_SERIALIZER_TMP_DIR`` Specify the path to storing temporary data during - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.serialize` and - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.deserialize`. + :py:meth:`~imas.ids_toplevel.IDSToplevel.serialize` and + :py:meth:`~imas.ids_toplevel.IDSToplevel.deserialize`. If it is not set, the default location ``/dev/shm/`` or the current working directory will be chosen. diff --git a/docs/source/courses/advanced/dd_versions.rst b/docs/source/courses/advanced/dd_versions.rst index 7b3eb02e..5ccb2474 100644 --- a/docs/source/courses/advanced/dd_versions.rst +++ b/docs/source/courses/advanced/dd_versions.rst @@ -3,13 +3,13 @@ Working with multiple data dictionary versions ============================================== -Contrary to most high level interface for IMAS, IMASPy code is not tied to a specific -version of the Data Dictionary. 
In this lesson we will explore how IMASPy handles +Contrary to most high level interface for IMAS, imas-python code is not tied to a specific +version of the Data Dictionary. In this lesson we will explore how imas-python handles different DD versions (including development builds of the DD), and how we can convert IDSs between different versions of the Data Dictionary. .. note:: - Most of the time you won't need to worry about DD versions and the default IMASPy + Most of the time you won't need to worry about DD versions and the default imas-python behaviour should be fine. @@ -19,7 +19,7 @@ The default Data Dictionary version ----------------------------------- In the other training lessons, we didn't explicitly work with Data Dictionary versions. -Therefore IMASPy was always using the `default` DD version. Let's find out what that +Therefore imas-python was always using the `default` DD version. Let's find out what that version is: @@ -32,35 +32,35 @@ Exercise 1: The default DD version .. md-tab-item:: Exercise - 1. Create an :py:class:`imaspy.IDSFactory() `. + 1. Create an :py:class:`imas.IDSFactory() `. 2. Print the version of the DD that is used. 3. Create an empty IDS with this IDSFactory (any IDS is fine) and print the DD version of the IDS, see - :py:meth:`~imaspy.util.get_data_dictionary_version`. What do you notice? - 4. Create an :py:class:`imaspy.DBEntry `, you may use - the :py:attr:`MEMORY_BACKEND `. Print the + :py:meth:`~imas.util.get_data_dictionary_version`. What do you notice? + 4. Create an :py:class:`imas.DBEntry `, you may use + the :py:attr:`MEMORY_BACKEND `. Print the DD version that is used. What do you notice? .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/dd_versions.py + .. literalinclude:: imas_snippets/dd_versions.py -Okay, so now you know what your default DD version is. But how is it determined? IMASPy +Okay, so now you know what your default DD version is. But how is it determined? 
imas-python first checks if you have an IMAS environment loaded by checking the environment variable ``IMAS_VERSION``. If you are on a cluster and have used ``module load IMAS`` or similar, this environment variable will indicate what data dictionary version this module is -using. IMASPy will use that version as its default. +using. imas-python will use that version as its default. -If the ``IMAS_VERSION`` environment is not set, IMASPy will take the newest version of +If the ``IMAS_VERSION`` environment is not set, imas-python will take the newest version of the Data Dictionary that came bundled with it. Which brings us to the following topic: Bundled Data Dictionary definitions ----------------------------------- -IMASPy comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions. +imas-python comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions. You can find out which versions are available by calling -:py:meth:`imaspy.dd_zip.dd_xml_versions`. +:py:meth:`imas.dd_zip.dd_xml_versions`. Converting an IDS between Data Dictionary versions @@ -74,7 +74,7 @@ things that could change: - Change the data type of an IDS node - Rename an IDS node -IMASPy can convert between different versions of the DD and will migrate the data as +imas-python can convert between different versions of the DD and will migrate the data as much as possible. Let's see how this works in the following exercise. @@ -89,23 +89,23 @@ Exercise 2: Convert an IDS between DD versions for the ``pulse_schedule`` IDS because a number of IDS nodes were renamed for this IDS. - 1. Create an :py:class:`imaspy.IDSFactory() ` + 1. Create an :py:class:`imas.IDSFactory() ` for DD version ``3.25.0``. 2. Create a ``pulse_schedule`` IDS with this IDSFactory and verify that it is using DD version ``3.25.0``. 3. Fill the IDS with some test data: - .. literalinclude:: imaspy_snippets/ids_convert.py + .. literalinclude:: imas_snippets/ids_convert.py :start-after: # 3. 
:end-before: # 4. - 4. Use :py:func:`imaspy.convert_ids ` to + 4. Use :py:func:`imas.convert_ids ` to convert the IDS to DD version 3.39.0. The ``antenna`` structure that we filled in the old version of the DD has since been renamed to ``launcher``, and the ``launching_angle_*`` structures to ``steering_angle``. Check that - IMASPy has converted the data successfully (for example with - :py:func:`imaspy.util.print_tree`). - 5. By default, IMASPy creates a shallow copy of the data, which means that the + imas-python has converted the data successfully (for example with + :py:func:`imas.util.print_tree`). + 5. By default, imas-python creates a shallow copy of the data, which means that the underlying data arrays are shared between the IDSs of both versions. Update the ``time`` data of the original IDS (for example: :code:`pulse_schedule.time[1] = 3`) and print the ``time`` data of the @@ -113,7 +113,7 @@ Exercise 2: Convert an IDS between DD versions .. note:: - :py:func:`imaspy.convert_ids ` has an + :py:func:`imas.convert_ids ` has an optional keyword argument ``deep_copy``. If you set this to ``True``, the converted IDS will not share data with the original IDS. @@ -126,7 +126,7 @@ Exercise 2: Convert an IDS between DD versions .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/ids_convert.py + .. literalinclude:: imas_snippets/ids_convert.py .. 
_`Automatic conversion between DD versions`: @@ -134,22 +134,22 @@ Exercise 2: Convert an IDS between DD versions Automatic conversion between DD versions ---------------------------------------- -When loading data (with :py:meth:`~imaspy.db_entry.DBEntry.get` or -:py:meth:`~imaspy.db_entry.DBEntry.get_slice`) or storing data (with -:py:meth:`~imaspy.db_entry.DBEntry.put` or -:py:meth:`~imaspy.db_entry.DBEntry.put_slice`), IMASPy automatically converts the DD +When loading data (with :py:meth:`~imas.db_entry.DBEntry.get` or +:py:meth:`~imas.db_entry.DBEntry.get_slice`) or storing data (with +:py:meth:`~imas.db_entry.DBEntry.put` or +:py:meth:`~imas.db_entry.DBEntry.put_slice`), imas-python automatically converts the DD version for you. In this section we will see how that works. The ``DBEntry`` DD version '''''''''''''''''''''''''' -A :py:class:`~imaspy.db_entry.DBEntry` object is tied to a specific version of the Data +A :py:class:`~imas.db_entry.DBEntry` object is tied to a specific version of the Data Dictionary. We have already briefly seen this in :ref:`dd version exercise 1`. The DD version can be selected when constructing a new ``DBEntry`` object, through the -:py:param:`~imaspy.db_entry.DBEntry.__init__.dd_version` or -:py:param:`~imaspy.db_entry.DBEntry.__init__.xml_path` (see also :ref:`Using custom +:py:param:`~imas.db_entry.DBEntry.__init__.dd_version` or +:py:param:`~imas.db_entry.DBEntry.__init__.xml_path` (see also :ref:`Using custom builds of the Data Dictionary`) parameters. If you provide neither, the default DD version is used. @@ -174,8 +174,8 @@ Exercise 3: Automatic conversion when storing IDSs .. code-block:: python - new_entry = imaspy.DBEntry( - imaspy.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" + new_entry = imas.DBEntry( + imas.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" ) 4. Put the ``core_profiles`` IDS in the new ``DBEntry``. @@ -184,7 +184,7 @@ Exercise 3: Automatic conversion when storing IDSs .. 
md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/autoconvert_put.py + .. literalinclude:: imas_snippets/autoconvert_put.py Exercise 4: Automatic conversion when loading IDSs @@ -196,24 +196,24 @@ Exercise 4: Automatic conversion when loading IDSs 1. For this exercise we will first create some test data: - .. literalinclude:: imaspy_snippets/autoconvert_get.py + .. literalinclude:: imas_snippets/autoconvert_get.py :start-after: # 1. :end-before: # 2. 2. Reopen the ``DBEntry`` with the default DD version. 3. ``get`` the pulse schedule IDS. Print its ``version_put/data_dictionary`` and Data Dictionary version (with - :py:meth:`~imaspy.util.get_data_dictionary_version`). What do you + :py:meth:`~imas.util.get_data_dictionary_version`). What do you notice? - 4. Use ``imaspy.util.print_tree`` to print all data in the loaded IDS. What do + 4. Use ``imas.util.print_tree`` to print all data in the loaded IDS. What do you notice? 5. Repeat steps 3 and 4, but set - :py:param:`~imaspy.db_entry.DBEntry.get.autoconvert` to ``False``. What do + :py:param:`~imas.db_entry.DBEntry.get.autoconvert` to ``False``. What do you notice this time? .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/autoconvert_get.py + .. literalinclude:: imas_snippets/autoconvert_get.py Use cases for disabling autoconvert @@ -230,7 +230,7 @@ contain large changes between DD versions, such as: .. caution:: - The :py:meth:`~imaspy.ids_convert.convert_ids` method warns you when data is not + The :py:meth:`~imas.ids_convert.convert_ids` method warns you when data is not converted. Due to technical constraints, the ``autoconvert`` logic doesn't log any such warnings. 
@@ -240,7 +240,7 @@ contain large changes between DD versions, such as: >>> # Continuing with the example from Exercise 4: >>> ps_noconvert = entry.get("pulse_schedule", autoconvert=False) - >>> imaspy.convert_ids(ps_noconvert, "3.40.0") + >>> imas.convert_ids(ps_noconvert, "3.40.0") 15:32:32 INFO Parsing data dictionary version 3.40.0 @dd_zip.py:129 15:32:32 INFO Starting conversion of IDS pulse_schedule from version 3.25.0 to version 3.40.0. @ids_convert.py:350 15:32:32 INFO Element 'ec/antenna/phase' does not exist in the target IDS. Data is not copied. @ids_convert.py:396 @@ -255,7 +255,7 @@ contain large changes between DD versions, such as: Using custom builds of the Data Dictionary ------------------------------------------ -In the previous sections we showed how you can direct IMASPy to use a specific released +In the previous sections we showed how you can direct imas-python to use a specific released version of the Data Dictionary definitions. Sometimes it is useful to work with unreleased (development or custom) versions of the data dictionaries as well. @@ -267,21 +267,21 @@ unreleased (development or custom) versions of the data dictionaries as well. might not be read properly in the future. If you build the Data Dictionary, a file called ``IDSDef.xml`` is created. This file -contains all IDS definitions. To work with a custom DD build, you need to point IMASPy +contains all IDS definitions. To work with a custom DD build, you need to point imas-python to this ``IDSDef.xml`` file: .. 
code-block:: python - :caption: Use a custom Data Dictionary build with IMASPy + :caption: Use a custom Data Dictionary build with imas-python my_idsdef_file = "path/to/IDSDef.xml" # Replace with the actual path # Point IDSFactory to this path: - my_factory = imaspy.IDSFactory(xml_path=my_idsdef_file) + my_factory = imas.IDSFactory(xml_path=my_idsdef_file) # Now you can create IDSs using your custom DD build: my_ids = my_factory.new("...") # If you need a DBEntry to put / get IDSs in the custom version: - my_entry = imaspy.DBEntry("imas:hdf5?path=my-testdb", "w", xml_path=my_idsdef_file) + my_entry = imas.DBEntry("imas:hdf5?path=my-testdb", "w", xml_path=my_idsdef_file) Once you have created the ``IDSFactory`` and/or ``DBEntry`` pointing to your custom DD @@ -291,5 +291,5 @@ build, you can use them like you normally would. .. rubric:: Footnotes .. [#DDdefs] To be more precise, the Data Dictionary definitions are generated when the - IMASPy package is created. See :ref:`this reference
` for more + imas-python package is created. See :ref:`this reference
` for more details. diff --git a/docs/source/courses/advanced/explore.rst b/docs/source/courses/advanced/explore.rst index 86b692e8..5fa6fdca 100644 --- a/docs/source/courses/advanced/explore.rst +++ b/docs/source/courses/advanced/explore.rst @@ -1,10 +1,10 @@ Advanced data exploration ========================= -In the :ref:`basic/explore` training we have seen how to explore IMASPy data structures +In the :ref:`basic/explore` training we have seen how to explore imas-python data structures in an interactive way. -In this lesson, we will go a step further and look at methods to explore IMASPy data +In this lesson, we will go a step further and look at methods to explore imas-python data structures programmatically. This can be useful for, for example, writing plotting tools, analysis scripts, etc. @@ -13,26 +13,26 @@ Exploring IDS (sub)structures ----------------------------- An IDS structure is a collection of IDS nodes (which could be structures, or arrays of -structures themselves). In IMASPy this is represented by the -:py:class:`~imaspy.ids_structure.IDSStructure` class. You will find these classes in a +structures themselves). In imas-python this is represented by the +:py:class:`~imas.ids_structure.IDSStructure` class. You will find these classes in a lot of places: - Data Dictionary IDSs is a special case of an IDS structure (implemented by class - :py:class:`~imaspy.ids_toplevel.IDSToplevel`, which is a subclass of + :py:class:`~imas.ids_toplevel.IDSToplevel`, which is a subclass of ``IDSStructure``). - Data Dictionary structures, for example, the ``ids_properties`` structure that is present in every IDS. - Data Dictionary arrays of structures (implemented by - :py:class:`~imaspy.ids_struct_array.IDSStructArray`) contain ``IDSStructure``\ s. + :py:class:`~imas.ids_struct_array.IDSStructArray`) contain ``IDSStructure``\ s. When you have an ``IDSStructure`` object, you can iterate over it to get all child nodes that are contained in this structure. 
See the following example: .. code-block:: python - import imaspy + import imas - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # core_profiles is an IDS toplevel, which is also a structure: print("Core profiles contains the following elements:") @@ -61,15 +61,15 @@ Exercise 1: Explore structures .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/explore_structures.py + .. literalinclude:: imas_snippets/explore_structures.py Explore IDS data nodes and arrays of structures ----------------------------------------------- Besides structures, IDSs contain arrays of structures, and data nodes. Arrays of -structures (modeled by :py:class:`~imaspy.ids_struct_array.IDSStructArray`) are (as the -name applies) arrays containing :py:class:`~imaspy.ids_structure.IDSStructure`\ s. Data +structures (modeled by :py:class:`~imas.ids_struct_array.IDSStructArray`) are (as the +name implies) arrays containing :py:class:`~imas.ids_structure.IDSStructure`\ s. Data nodes can contain scalar or array data of various types. Some methods and properties are defined for all data nodes and arrays of structures: @@ -106,14 +106,14 @@ Some methods and properties are defined for all data nodes and arrays of structu details. .. seealso:: - You can find more details on IDS data node related classes and methods in the IMASPy Architecture documentation: - :ref:`imaspy_architecture/IDS_nodes` + You can find more details on IDS data node related classes and methods in the imas-python Architecture documentation: + :ref:`imas_architecture/IDS_nodes` Apply a function to all nodes in an IDS ''''''''''''''''''''''''''''''''''''''' Before diving into the exercise and use this new knowledge, it is useful to know the -:py:meth:`imaspy.util.visit_children` method. This method allows you to apply a method +:py:meth:`imas.util.visit_children` method. This method allows you to apply a method to all nodes of an IDS.
Additional keyword arguments can control whether you want to include leaf nodes (data nodes) only, or also include structures and arrays of structure. You can also choose between applying the function to filled nodes only (the @@ -122,7 +122,7 @@ default) or all nodes, including empty ones. .. seealso:: You can find more details in the API documentation: - :py:meth:`imaspy.util.visit_children` + :py:meth:`imas.util.visit_children` Exercise 2: Explore data nodes @@ -134,7 +134,7 @@ Exercise 2: Explore data nodes 1. Load the training data for the ``equilibrium`` IDS. 2. Create a function that prints the path, shape and size of an IDS node. - 3. Use :py:meth:`~imaspy.util.visit_children` to apply the function to all + 3. Use :py:meth:`~imas.util.visit_children` to apply the function to all non-empty nodes in the equilbrium IDS. 4. Update your function such that it skips scalar (0D) IDS nodes. Apply the updated function to the equilibrium IDS. @@ -142,9 +142,9 @@ Exercise 2: Explore data nodes .. hint:: :collapsible: - Review IMASPy Architecture documentation for data node methods: - :ref:`imaspy_architecture/IDS_nodes` + Review imas-python Architecture documentation for data node methods: + :ref:`imas_architecture/IDS_nodes` .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/explore_data.py + .. literalinclude:: imas_snippets/explore_data.py diff --git a/docs/source/courses/advanced/hashing.rst b/docs/source/courses/advanced/hashing.rst index 4bac1fda..bc9d77fe 100644 --- a/docs/source/courses/advanced/hashing.rst +++ b/docs/source/courses/advanced/hashing.rst @@ -1,14 +1,14 @@ Calculating hashes of IMAS data =============================== -IMASPy can calculate *hashes* of IMAS data. As `Wikipedia explains better than I could +imas-python can calculate *hashes* of IMAS data. As `Wikipedia explains better than I could do `__: A hash function is any function that can be used to map data of arbitrary size to fixed-size values, [...]. 
The values returned by a hash function are called *hash values*, *hash codes*, *hash digests*, *digests*, or simply *hashes*. -IMASPy is using the XXH3 hash function from the `xxHash project +imas-python is using the XXH3 hash function from the `xxHash project `__. This is a *non-cryptographic* hash and returns 64-bit hashes. @@ -33,7 +33,7 @@ Exercise 1: Calculate some hashes .. md-tab-item:: Exercise - In this exercise we will use :py:func:`imaspy.util.calc_hash` to calculate + In this exercise we will use :py:func:`imas.util.calc_hash` to calculate hashes of some IDSs. Use :external:py:meth:`bytes.hex` to show a more readable hexidecimal format of the hash. @@ -51,11 +51,11 @@ Exercise 1: Calculate some hashes .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/hashing.py + .. literalinclude:: imas_snippets/hashing.py -Properties of IMASPy's hashes ------------------------------ +Properties of imas-python's hashes +---------------------------------- The implementation of the hash function has the following properties: @@ -91,4 +91,4 @@ Technical details and specification ----------------------------------- You can find the technical details, and a specification for calculating the hashes, in -the documentation of :py:meth:`imaspy.util.calc_hash`. +the documentation of :py:meth:`imas.util.calc_hash`. diff --git a/docs/source/courses/advanced/imaspy_snippets/alternative_coordinates.py b/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py similarity index 91% rename from docs/source/courses/advanced/imaspy_snippets/alternative_coordinates.py rename to docs/source/courses/advanced/imas_snippets/alternative_coordinates.py index 11816362..e4adfceb 100644 --- a/docs/source/courses/advanced/imaspy_snippets/alternative_coordinates.py +++ b/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py @@ -1,7 +1,7 @@ -import imaspy +import imas # 1. 
Create an empty distributions IDS -distributions = imaspy.IDSFactory().distributions() +distributions = imas.IDSFactory().distributions() # 2. Use the metadata attribute to find the coordinates of # distribution/profiles_2d/density @@ -21,7 +21,7 @@ # What do you notice: in both dimensions there are multiple options for the coordinate. # 3. Retrieve the coordinate values through the ``coordinates`` attribute. -# This will raise a coordinate lookup error because IMASPy cannot choose which of the +# This will raise a coordinate lookup error because imas-python cannot choose which of the # coordinates to use: try: print(p2d.density.coordinates[0]) diff --git a/docs/source/courses/advanced/imaspy_snippets/autoconvert_get.py b/docs/source/courses/advanced/imas_snippets/autoconvert_get.py similarity index 83% rename from docs/source/courses/advanced/imaspy_snippets/autoconvert_get.py rename to docs/source/courses/advanced/imas_snippets/autoconvert_get.py index f2b03aee..76ee8e90 100644 --- a/docs/source/courses/advanced/imaspy_snippets/autoconvert_get.py +++ b/docs/source/courses/advanced/imas_snippets/autoconvert_get.py @@ -1,17 +1,17 @@ -import imaspy -from imaspy.ids_defs import ASCII_BACKEND, IDS_TIME_MODE_HOMOGENEOUS -from imaspy.util import get_data_dictionary_version +import imas +from imas.ids_defs import ASCII_BACKEND, IDS_TIME_MODE_HOMOGENEOUS +from imas.util import get_data_dictionary_version # 1. 
Create test data # Create an IDSFactory for DD 3.25.0 -factory = imaspy.IDSFactory("3.25.0") +factory = imas.IDSFactory("3.25.0") # Create a pulse_schedule IDS pulse_schedule = factory.new("pulse_schedule") # Fill the IDS with some test data pulse_schedule.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS -pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with IMASPy" +pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with imas-python" pulse_schedule.time = [1.0, 1.1, 1.2] pulse_schedule.ec.antenna.resize(1) @@ -28,13 +28,13 @@ antenna.phase.reference_name = "Phase reference name" # And store the IDS in a DBEntry using DD 3.25.0 -entry = imaspy.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.25.0") +entry = imas.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.25.0") entry.create() entry.put(pulse_schedule) entry.close() # 2. Reopen the DBEntry with DD 3.42.0: -entry = imaspy.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.42.0") +entry = imas.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.42.0") entry.open() # 3. Get the pulse schedule IDS @@ -47,7 +47,7 @@ # get_data_dictionary_version: 3.40.0 -> the IDS was automatically converted # 4. Print the data in the loaded IDS -imaspy.util.print_tree(ps_autoconvert) +imas.util.print_tree(ps_autoconvert) # What do you notice? # 1. The antenna AoS was renamed # 2. Several nodes no longer exist! @@ -65,6 +65,6 @@ # get_data_dictionary_version: 3.25.0 -> the IDS was not converted! # Print the data in the loaded IDS -imaspy.util.print_tree(ps_noconvert) +imas.util.print_tree(ps_noconvert) # What do you notice? # All data is here exactly as it was put at the beginnning of this exercise. 
diff --git a/docs/source/courses/advanced/imaspy_snippets/autoconvert_put.py b/docs/source/courses/advanced/imas_snippets/autoconvert_put.py similarity index 71% rename from docs/source/courses/advanced/imaspy_snippets/autoconvert_put.py rename to docs/source/courses/advanced/imas_snippets/autoconvert_put.py index 0ab5d121..ba9d2b48 100644 --- a/docs/source/courses/advanced/imaspy_snippets/autoconvert_put.py +++ b/docs/source/courses/advanced/imas_snippets/autoconvert_put.py @@ -1,17 +1,17 @@ -import imaspy -import imaspy.training -from imaspy.util import get_data_dictionary_version +import imas +import imas.training +from imas.util import get_data_dictionary_version # 1. Load the training data for the ``core_profiles`` IDS -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() core_profiles = entry.get("core_profiles") # 2. Print the DD version: print(get_data_dictionary_version(core_profiles)) # 3. Create a new DBEntry with DD version 3.37.0 -new_entry = imaspy.DBEntry( - imaspy.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" +new_entry = imas.DBEntry( + imas.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" ) new_entry.create() diff --git a/docs/source/courses/advanced/imaspy_snippets/calc_with_units.py b/docs/source/courses/advanced/imas_snippets/calc_with_units.py similarity index 93% rename from docs/source/courses/advanced/imaspy_snippets/calc_with_units.py rename to docs/source/courses/advanced/imas_snippets/calc_with_units.py index 6bc131f4..fd2253ed 100644 --- a/docs/source/courses/advanced/imaspy_snippets/calc_with_units.py +++ b/docs/source/courses/advanced/imas_snippets/calc_with_units.py @@ -1,11 +1,11 @@ import itertools # python standard library iteration tools -import imaspy -import imaspy.training +import imas +import imas.training import pint # 1. 
Load core_profiles IDS from training DBEntry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") # 2. Select the first time slice of profiles_1d diff --git a/docs/source/courses/advanced/imaspy_snippets/coordinates.py b/docs/source/courses/advanced/imas_snippets/coordinates.py similarity index 88% rename from docs/source/courses/advanced/imaspy_snippets/coordinates.py rename to docs/source/courses/advanced/imas_snippets/coordinates.py index db56f844..8b9b67fe 100644 --- a/docs/source/courses/advanced/imaspy_snippets/coordinates.py +++ b/docs/source/courses/advanced/imas_snippets/coordinates.py @@ -1,7 +1,7 @@ -import imaspy.training +import imas.training # 1. Load the training data for the core_profiles IDS: -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() core_profiles = entry.get("core_profiles") # 1a. Print the coordinate of profiles_1d[0].electrons.temperature @@ -17,13 +17,13 @@ # 1c. Change the time mode and print again core_profiles.ids_properties.homogeneous_time = \ - imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS + imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS print(core_profiles.profiles_1d.coordinates[0]) # What has changed? Now we get a numpy array with values -9e+40: # [-9.e+40 -9.e+40 -9.e+40] # # In heterogeneous time, the coordinate of profiles_1d is profiles_1d/time, which is a -# scalar. IMASPy will construct a numpy array for you where +# scalar. imas-python will construct a numpy array for you where # array[i] := profiles_1d[i]/time # Since we didn't set these values, they are set to the default EMPTY_FLOAT, which is # -9e+40. @@ -37,7 +37,7 @@ # This will output: # (IDSCoordinate('1...N'),) # The coordinate of profiles_2d is an index. 
When requesting the coordinate values, -# IMASPy will generate an index array for you: +# imas-python will generate an index array for you: print(slice0.profiles_2d.coordinates[0]) # -> array([0]) diff --git a/docs/source/courses/advanced/imaspy_snippets/dd_versions.py b/docs/source/courses/advanced/imas_snippets/dd_versions.py similarity index 76% rename from docs/source/courses/advanced/imaspy_snippets/dd_versions.py rename to docs/source/courses/advanced/imas_snippets/dd_versions.py index 24046bf2..1f9b3a69 100644 --- a/docs/source/courses/advanced/imaspy_snippets/dd_versions.py +++ b/docs/source/courses/advanced/imas_snippets/dd_versions.py @@ -1,8 +1,8 @@ -import imaspy -from imaspy.util import get_data_dictionary_version +import imas +from imas.util import get_data_dictionary_version # 1. Create an IDSFactory -default_factory = imaspy.IDSFactory() +default_factory = imas.IDSFactory() # 2. Print the DD version used by the IDSFactory # @@ -17,9 +17,9 @@ # it. # 4. Create a new DBEntry -default_entry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "test", 0, 0) +default_entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 0, 0) default_entry.create() # Alternative URI syntax when using AL5.0.0: -# default_entry = imaspy.DBEntry("imas:memory?path=.") +# default_entry = imas.DBEntry("imas:memory?path=.") print("DD version used for the DBEntry:", get_data_dictionary_version(default_entry)) # What do you notice? It is the same default version again. 
diff --git a/docs/source/courses/advanced/imaspy_snippets/explore_data.py b/docs/source/courses/advanced/imas_snippets/explore_data.py similarity index 68% rename from docs/source/courses/advanced/imaspy_snippets/explore_data.py rename to docs/source/courses/advanced/imas_snippets/explore_data.py index e79f5415..5a9f824c 100644 --- a/docs/source/courses/advanced/imaspy_snippets/explore_data.py +++ b/docs/source/courses/advanced/imas_snippets/explore_data.py @@ -1,9 +1,9 @@ -import imaspy -import imaspy.training -from imaspy.util import get_full_path +import imas +import imas.training +from imas.util import get_full_path # 1. Load the training data equilibrium IDS -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() equilibrium = entry.get("equilibrium") @@ -13,7 +13,7 @@ def print_path_shape_size(node): # 3. Apply to equilibrium IDS -imaspy.util.visit_children(print_path_shape_size, equilibrium) +imas.util.visit_children(print_path_shape_size, equilibrium) print() @@ -25,4 +25,4 @@ def print_path_shape_size_not0d(node): # And apply to the equilibrium IDS -imaspy.util.visit_children(print_path_shape_size_not0d, equilibrium) +imas.util.visit_children(print_path_shape_size_not0d, equilibrium) diff --git a/docs/source/courses/advanced/imaspy_snippets/explore_structures.py b/docs/source/courses/advanced/imas_snippets/explore_structures.py similarity index 85% rename from docs/source/courses/advanced/imaspy_snippets/explore_structures.py rename to docs/source/courses/advanced/imas_snippets/explore_structures.py index de4691b3..8cc9730a 100644 --- a/docs/source/courses/advanced/imaspy_snippets/explore_structures.py +++ b/docs/source/courses/advanced/imas_snippets/explore_structures.py @@ -1,8 +1,8 @@ -import imaspy -import imaspy.training +import imas +import imas.training # 1. 
Load the equilibrium IDS from the training data -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() equilibrium = entry.get("equilibrium") # 2. Print non-empty child nodes diff --git a/docs/source/courses/advanced/imaspy_snippets/hashing.py b/docs/source/courses/advanced/imas_snippets/hashing.py similarity index 59% rename from docs/source/courses/advanced/imaspy_snippets/hashing.py rename to docs/source/courses/advanced/imas_snippets/hashing.py index 2a5d5bca..74e12be6 100644 --- a/docs/source/courses/advanced/imaspy_snippets/hashing.py +++ b/docs/source/courses/advanced/imas_snippets/hashing.py @@ -1,19 +1,19 @@ -import imaspy +import imas # 1. Create IDS -eq = imaspy.IDSFactory().equilibrium() -print(imaspy.util.calc_hash(eq).hex(' ', 2)) # 2d06 8005 38d3 94c2 +eq = imas.IDSFactory().equilibrium() +print(imas.util.calc_hash(eq).hex(' ', 2)) # 2d06 8005 38d3 94c2 # 2. Update homogeneous_time eq.ids_properties.homogeneous_time = 0 -print(imaspy.util.calc_hash(eq).hex(' ', 2)) # 3b9b 9297 56a2 42fd +print(imas.util.calc_hash(eq).hex(' ', 2)) # 3b9b 9297 56a2 42fd # Yes: the hash changed (significantly!). This was expected, because the data is no # longer the same # 3. Resize time_slice eq.time_slice.resize(2) -print(imaspy.util.calc_hash(eq.time_slice[0]).hex(' ', 2)) # 2d06 8005 38d3 94c2 -print(imaspy.util.calc_hash(eq.time_slice[1]).hex(' ', 2)) # 2d06 8005 38d3 94c2 +print(imas.util.calc_hash(eq.time_slice[0]).hex(' ', 2)) # 2d06 8005 38d3 94c2 +print(imas.util.calc_hash(eq.time_slice[1]).hex(' ', 2)) # 2d06 8005 38d3 94c2 # What do you notice? # # The hashes of both time_slice[0] and time_slice[1] are identical, because both @@ -29,15 +29,15 @@ # 5. 
Fill data p2d.r = [[1., 2.]] p2d.z = p2d.r -print(imaspy.util.calc_hash(p2d.r).hex(' ', 2)) # 352b a6a6 b40c 708d -print(imaspy.util.calc_hash(p2d.z).hex(' ', 2)) # 352b a6a6 b40c 708d +print(imas.util.calc_hash(p2d.r).hex(' ', 2)) # 352b a6a6 b40c 708d +print(imas.util.calc_hash(p2d.z).hex(' ', 2)) # 352b a6a6 b40c 708d # These hashes are identical, because they contain the same data # 6. Only r or z del p2d.z -print(imaspy.util.calc_hash(p2d).hex(' ', 2)) # 0dcb ddaa 78ea 83a3 +print(imas.util.calc_hash(p2d).hex(' ', 2)) # 0dcb ddaa 78ea 83a3 p2d.z = p2d.r del p2d.r -print(imaspy.util.calc_hash(p2d).hex(' ', 2)) # f86b 8ea8 9652 3768 +print(imas.util.calc_hash(p2d).hex(' ', 2)) # f86b 8ea8 9652 3768 # Although the data inside `r` and `z` is identical, we get different hashes because the # data is in a different attribute. diff --git a/docs/source/courses/advanced/imaspy_snippets/ids_convert.py b/docs/source/courses/advanced/imas_snippets/ids_convert.py similarity index 83% rename from docs/source/courses/advanced/imaspy_snippets/ids_convert.py rename to docs/source/courses/advanced/imas_snippets/ids_convert.py index 70f1892d..77ea422c 100644 --- a/docs/source/courses/advanced/imaspy_snippets/ids_convert.py +++ b/docs/source/courses/advanced/imas_snippets/ids_convert.py @@ -1,8 +1,8 @@ -import imaspy -from imaspy.util import get_data_dictionary_version +import imas +from imas.util import get_data_dictionary_version # 1. Create an IDSFactory for DD 3.25.0 -factory = imaspy.IDSFactory("3.25.0") +factory = imas.IDSFactory("3.25.0") # 2. Create a pulse_schedule IDS pulse_schedule = factory.new("pulse_schedule") @@ -10,9 +10,9 @@ # 3. 
Fill the IDS with some test data pulse_schedule.ids_properties.homogeneous_time = \ - imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS pulse_schedule.ids_properties.comment = \ - "Testing renamed IDS nodes with IMASPy" + "Testing renamed IDS nodes with imas-python" pulse_schedule.time = [1., 1.1, 1.2] pulse_schedule.ec.antenna.resize(1) @@ -26,10 +26,10 @@ antenna.launching_angle_tor.reference.data = [3.1, 3.2, 3.3] # 4. Convert the IDS from version 3.25.0 to 3.39.0 -pulse_schedule_3_39 = imaspy.convert_ids(pulse_schedule, "3.39.0") +pulse_schedule_3_39 = imas.convert_ids(pulse_schedule, "3.39.0") # Check that the data is converted -imaspy.util.print_tree(pulse_schedule_3_39) +imas.util.print_tree(pulse_schedule_3_39) # 5. Update time data pulse_schedule.time[1] = 3 @@ -41,7 +41,7 @@ print(pulse_schedule_3_39.ids_properties.comment) # What do you notice? # This prints the original value of the comment ("Testing renamed IDS -# nodes with IMASPy"). +# nodes with imas-python"). # This is actually the same that you get when creating a shallow copy # with ``copy.copy`` of a regular Python dictionary: import copy @@ -60,7 +60,7 @@ # 7. Set phase.reference_name: pulse_schedule.ec.antenna[0].phase.reference_name = "Test refname" # And convert again -pulse_schedule_3_39 = imaspy.convert_ids(pulse_schedule, "3.39.0") -imaspy.util.print_tree(pulse_schedule_3_39) +pulse_schedule_3_39 = imas.convert_ids(pulse_schedule, "3.39.0") +imas.util.print_tree(pulse_schedule_3_39) # What do you notice? # Element 'ec/antenna/phase' does not exist in the target IDS. Data is not copied. 
diff --git a/docs/source/courses/advanced/imaspy_snippets/ids_to_xarray.py b/docs/source/courses/advanced/imas_snippets/ids_to_xarray.py similarity index 92% rename from docs/source/courses/advanced/imaspy_snippets/ids_to_xarray.py rename to docs/source/courses/advanced/imas_snippets/ids_to_xarray.py index 717db8dd..89a94b01 100644 --- a/docs/source/courses/advanced/imaspy_snippets/ids_to_xarray.py +++ b/docs/source/courses/advanced/imas_snippets/ids_to_xarray.py @@ -9,12 +9,12 @@ import matplotlib.pyplot as plt import numpy -import imaspy -import imaspy.training +import imas +import imas.training import xarray # 1. Load core_profiles IDS from training DBEntry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") # 2. Store the temperature of the first time slice diff --git a/docs/source/courses/advanced/imaspy_snippets/tensorized_ids_to_xarray.py b/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py similarity index 94% rename from docs/source/courses/advanced/imaspy_snippets/tensorized_ids_to_xarray.py rename to docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py index fe7bbb9c..ff4f4e28 100644 --- a/docs/source/courses/advanced/imaspy_snippets/tensorized_ids_to_xarray.py +++ b/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py @@ -9,12 +9,12 @@ import matplotlib.pyplot as plt import numpy -import imaspy -import imaspy.training +import imas +import imas.training import xarray # 1. Load core_profiles IDS from training DBEntry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") # 2. 
Store the temperature of the first time slice diff --git a/docs/source/courses/advanced/metadata.rst b/docs/source/courses/advanced/metadata.rst index c8eeba2e..42cb6abc 100644 --- a/docs/source/courses/advanced/metadata.rst +++ b/docs/source/courses/advanced/metadata.rst @@ -3,13 +3,13 @@ Using Data Dictionary metadata ============================== -IMASPy provides convenient access to Data Dictionary metadata of any IDS node through +imas-python provides convenient access to Data Dictionary metadata of any IDS node through the ``metadata`` attribute: .. code-block:: python - >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.metadata >>> core_profiles.time.metadata @@ -23,21 +23,21 @@ cases. Overview of available metadata ------------------------------ -The data dictionary metadata that is parsed by IMASPy is listed in the API -documentation for :py:class:`~imaspy.ids_metadata.IDSMetadata`. +The data dictionary metadata that is parsed by imas-python is listed in the API +documentation for :py:class:`~imas.ids_metadata.IDSMetadata`. -Note that not all metadata from the IMAS Data Dictionary is parsed by IMASPy. +Note that not all metadata from the IMAS Data Dictionary is parsed by imas-python. This metadata is still accessible on the :code:`metadata` attribute. You can use -:py:func:`imaspy.util.inspect` to get an overview of all metadata associated to an +:py:func:`imas.util.inspect` to get an overview of all metadata associated to an element in an IDS. .. code-block:: python :caption: Example showing all metadata for some ``core_profiles`` elements. 
- >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() - >>> imaspy.util.inspect(core_profiles.metadata) - ╭---- -----╮ + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() + >>> imas.util.inspect(core_profiles.metadata) + ╭---- -----╮ │ Container for IDS Metadata │ │ │ │ ╭------------------------------------------------╮ │ @@ -63,8 +63,8 @@ element in an IDS. │ type = │ │ units = '' │ ╰----------------------------------------------------╯ - >>> imaspy.util.inspect(core_profiles.time.metadata) - ╭------ -------╮ + >>> imas.util.inspect(core_profiles.time.metadata) + ╭------ -------╮ │ Container for IDS Metadata │ │ │ │ ╭----------------------------------------------------╮ │ @@ -108,7 +108,7 @@ quite complicated, but summarized they come in two categories: values per pixel, and another variable storing some processed quantities per pixel. In this case, the coordinates are indices (line / column index of the pixel), but these must be the same for both quantities. This information is stored in the - :py:attr:`~imaspy.ids_metadata.IDSMetadata.coordinates_same_as` metadata. + :py:attr:`~imas.ids_metadata.IDSMetadata.coordinates_same_as` metadata. 2. Coordinates are other quantities in the Data Dictionary. @@ -140,7 +140,7 @@ Exercise 1: Using coordinates do you notice? c. Change the time mode of the IDS from homogeneous time to heterogeneous time. You do this by setting - ``ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS``. + ``ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS``. Print the coordinate of the ``profiles_1d`` array of structure again. What has changed? @@ -151,7 +151,7 @@ Exercise 1: Using coordinates .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/coordinates.py + .. literalinclude:: imas_snippets/coordinates.py Exercise 2: Alternative coordinates @@ -176,7 +176,7 @@ Exercise 2: Alternative coordinates 1. 
Resize the array of structures so you can access the metadata of the elements. 2. Use the indexing operator on - :py:class:`~imaspy.ids_metadata.IDSMetadata`. For example, + :py:class:`~imas.ids_metadata.IDSMetadata`. For example, ``distributions.metadata["distribution/wave"]`` to get the metadata of the ``distribution[]/wave`` array of structures. 3. Resize the ``distribution`` and ``distribution[0].profiles_2d`` arrays of @@ -185,17 +185,17 @@ Exercise 2: Alternative coordinates you notice? 4. You can still use the metadata to go to the coordinate node options: - a. Use the :py:attr:`~imaspy.ids_coordinates.IDSCoordinate.references` - attribute of the :py:class:`~imaspy.ids_coordinates.IDSCoordinate` + a. Use the :py:attr:`~imas.ids_coordinates.IDSCoordinate.references` + attribute of the :py:class:`~imas.ids_coordinates.IDSCoordinate` objects in the ``metadata`` to get the paths to each of the coordinate - options. This will give you the :py:class:`~imaspy.ids_path.IDSPath` + options. This will give you the :py:class:`~imas.ids_path.IDSPath` objects for each coordinate option. - b. Then, use :py:meth:`IDSPath.goto ` to go + b. Then, use :py:meth:`IDSPath.goto ` to go to the corresponding IDS node. .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/alternative_coordinates.py + .. literalinclude:: imas_snippets/alternative_coordinates.py Units and dimensional analysis with Pint @@ -208,8 +208,8 @@ Units and dimensional analysis with Pint `_. The Data Dictionary specifies the units of stored quantities. This metadata is -accessible in IMASPy via :py:attr:`metadata.units -`. In most cases, these units are in a format +accessible in imas-python via :py:attr:`metadata.units +`. In most cases, these units are in a format that ``pint`` can understand (for example ``T``, ``Wb``, ``m^-3``, ``m.s^-1``). There are some exceptions to that, with the main ones ``-`` (indicating a quantity is @@ -220,7 +220,7 @@ go into that in this lesson. 
For conversion of units from the Data Dictionary format to pint units, we recommend creating a custom function, such as the following: -.. literalinclude:: imaspy_snippets/calc_with_units.py +.. literalinclude:: imas_snippets/calc_with_units.py :caption: Convert DD units to Pint Units :start-at: # Create pint UnitRegistry :end-before: # End @@ -253,4 +253,4 @@ Exercise 3: Calculate the mass density from ``core_profiles/profiles_1d`` .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/calc_with_units.py + .. literalinclude:: imas_snippets/calc_with_units.py diff --git a/docs/source/courses/advanced/xarray.rst b/docs/source/courses/advanced/xarray.rst index a8ab68c2..e1fb5498 100644 --- a/docs/source/courses/advanced/xarray.rst +++ b/docs/source/courses/advanced/xarray.rst @@ -4,7 +4,7 @@ Create ``xarray.DataArray`` from an IDS .. info:: In this lesson you will create a ``DataArray`` manually. In a future version of - IMASPy we plan to include functionality that will automatically do this for you. + imas-python we plan to include functionality that will automatically do this for you. That should further simplify working with data inside IDSs. Let's start with an introduction of Xarray. According to `their website @@ -61,7 +61,7 @@ Exercise 1: create a ``DataArray`` for ``profiles_1d/temperature`` .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/ids_to_xarray.py + .. literalinclude:: imas_snippets/ids_to_xarray.py Exercise 2: include the ``time`` axis in the ``DataArray`` @@ -96,4 +96,4 @@ the ``profiles_1d`` array of structures. When the grid is not changing in the ID .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/tensorized_ids_to_xarray.py + .. 
literalinclude:: imas_snippets/tensorized_ids_to_xarray.py diff --git a/docs/source/courses/advanced_user_training.rst b/docs/source/courses/advanced_user_training.rst index 8f11558a..c91be432 100644 --- a/docs/source/courses/advanced_user_training.rst +++ b/docs/source/courses/advanced_user_training.rst @@ -1,9 +1,9 @@ -Advanced IMASPy -=============== +Advanced imas-python +==================== -In this IMASPy training, we dive into more advanced features of IMASPy. It is assumed -you are familiar with the basic features of IMASPy, which are introduced in the -:ref:`IMASPy 101` training. +In this imas-python training, we dive into more advanced features of imas-python. It is assumed +you are familiar with the basic features of imas-python, which are introduced in the +:ref:`imas-python 101` training. .. note:: diff --git a/docs/source/courses/basic/al4_snippets/create_core_profiles.py b/docs/source/courses/basic/al4_snippets/create_core_profiles.py deleted file mode 100644 index 39c8aac5..00000000 --- a/docs/source/courses/basic/al4_snippets/create_core_profiles.py +++ /dev/null @@ -1,40 +0,0 @@ -import datetime - -import imas -import numpy as np - - -cp = imas.core_profiles() - -# Set properties -cp.ids_properties.homogeneous_time = imas.imasdef.IDS_TIME_MODE_HOMOGENEOUS -cp.ids_properties.comment = "Synthetic IDS created for the IMASPy course" -cp.ids_properties.creation_date = datetime.date.today().isoformat() - -# Set a time array -cp.time = np.array([1.0, 2.5, 4.0]) - -# Main coordinate -rho_tor_norm = np.linspace(0, 1, num=64) - -# Generate some 1D profiles -cp.profiles_1d.resize(len(cp.time)) -for index, t in enumerate(cp.time): - t_e = np.exp(-16 * rho_tor_norm**2) + (1 - np.exp(4 * rho_tor_norm - 3)) * t / 8 - t_e *= t * 500 - # Store the generated t_e as electron temperature - cp.profiles_1d[index].electrons.temperature = t_e - -# Validate the IDS for consistency -# cp.validate() # <-- not available in AL4 - -# Fill in the missing rho_tor_norm 
coordinate -for index in range(3): - cp.profiles_1d[index].grid.rho_tor_norm = rho_tor_norm - -# Create a new data entry for storing the IDS -pulse, run, database = 1, 1, "imaspy-course" -entry = imas.DBEntry(imas.imasdef.ASCII_BACKEND, database, pulse, run) -entry.create() - -entry.put(cp) diff --git a/docs/source/courses/basic/al4_snippets/iterate_core_profiles.py b/docs/source/courses/basic/al4_snippets/iterate_core_profiles.py deleted file mode 100644 index dad52da1..00000000 --- a/docs/source/courses/basic/al4_snippets/iterate_core_profiles.py +++ /dev/null @@ -1,12 +0,0 @@ -import imas -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -cp = entry.get("core_profiles") -for el in ["profiles_1d", "global_quantities", "code"]: - try: - print(getattr(cp, el)) - except NameError: - print(f"Could not print {el}, internal IMAS error") diff --git a/docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py b/docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py deleted file mode 100644 index 9da796f8..00000000 --- a/docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py +++ /dev/null @@ -1,35 +0,0 @@ -import os - -import matplotlib -import imas -import imaspy.training - -# To avoid possible display issues when Matplotlib uses a non-GUI backend -if "DISPLAY" not in os.environ: - matplotlib.use("agg") -else: - matplotlib.use("TKagg") - -import matplotlib.pyplot as plt - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Read n_e profile and the associated normalised toroidal flux coordinate at -t = 443 # seconds - -cp = entry.get_slice("core_profiles", t, imas.imasdef.CLOSEST_INTERP) - -# profiles_1d should only contain the requested slice -assert len(cp.profiles_1d) == 1 - -ne = cp.profiles_1d[0].electrons.density -rho = cp.profiles_1d[0].grid.rho_tor_norm - -# Plot the figure -fig, ax = plt.subplots() -ax.plot(rho, ne) 
-ax.set_ylabel(r"$n_e$") -ax.set_xlabel(r"$\rho_{tor, norm}$") -ax.ticklabel_format(axis="y", scilimits=(-1, 1)) -plt.show() diff --git a/docs/source/courses/basic/al4_snippets/print_idss.py b/docs/source/courses/basic/al4_snippets/print_idss.py deleted file mode 100644 index 05de3094..00000000 --- a/docs/source/courses/basic/al4_snippets/print_idss.py +++ /dev/null @@ -1,5 +0,0 @@ -from imas.ids_names import IDSName - -# As each imas module is compiled with a specific DD version, we can load the -# names from the module itself -print([name.value for name in IDSName]) diff --git a/docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py b/docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py deleted file mode 100644 index 6d7b0fda..00000000 --- a/docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py +++ /dev/null @@ -1,21 +0,0 @@ -import imas -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Read n_e profile and the associated normalised toroidal flux coordinate at -t = 443 # seconds - -cp = entry.get_slice("core_profiles", t, imas.imasdef.CLOSEST_INTERP) - -# profiles_1d should only contain the requested slice -assert len(cp.profiles_1d) == 1 - -ne = cp.profiles_1d[0].electrons.density -rho = cp.profiles_1d[0].grid.rho_tor_norm -print("ne =", ne) -print("rho =", rho) - -# Close the datafile -entry.close() diff --git a/docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py b/docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py deleted file mode 100644 index 2347e44f..00000000 --- a/docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py +++ /dev/null @@ -1,26 +0,0 @@ -import numpy as np -import imas -import imaspy.training - - -# Find nearest value and index in an array -def find_nearest(a, a0): - "Element in nd array `a` closest to the scalar value `a0`" - idx = np.abs(a - a0).argmin() - return a[idx], 
idx - - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Read the time array from the equilibrium IDS -equilibrium = entry.get("equilibrium") # All time slices -time_array = equilibrium.time - -# Find the index of the desired time slice in the time array -t_closest, t_index = find_nearest(time_array, 433) -print("Time index = ", t_index) -print("Time value = ", t_closest) - -# Close input data entry -entry.close() diff --git a/docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py b/docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py deleted file mode 100644 index d7df1b22..00000000 --- a/docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py +++ /dev/null @@ -1,20 +0,0 @@ -import imas -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# 1. Read and print the time of the equilibrium IDS for the whole scenario -equilibrium = entry.get("equilibrium") # All time slices -# 2. Print the time array: -print(equilibrium.time) - -# 3. Load the core_profiles IDS -core_profiles = entry.get("core_profiles") -# 4. When you inspect the core_profiles.time array, you'll find that item [1] -# corresponds to t ~ 433s. -# 5. 
Print the electron temperature -print(core_profiles.profiles_1d[1].electrons.temperature) - -# Close input data entry -entry.close() diff --git a/docs/source/courses/basic/al4_snippets/transform_grid.py b/docs/source/courses/basic/al4_snippets/transform_grid.py deleted file mode 100644 index 08a572f4..00000000 --- a/docs/source/courses/basic/al4_snippets/transform_grid.py +++ /dev/null @@ -1,105 +0,0 @@ -import os - -import imas -import imaspy.training -import matplotlib -import numpy as np -from scipy.interpolate import RegularGridInterpolator - -if "DISPLAY" not in os.environ: - matplotlib.use("agg") -else: - matplotlib.use("TKagg") - -import matplotlib.pyplot as plt - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Use a partial get to retrieve the full time vector -input_times = entry.partial_get("equilibrium", "time") - -# Create output data entry -output_entry = imas.DBEntry( - imas.imasdef.MEMORY_BACKEND, "imaspy-course", 2, 1) -output_entry.create() - -# Loop over each time slice -for time in input_times: - eq = entry.get_slice("equilibrium", time, imas.imasdef.CLOSEST_INTERP) - - # Update comment - eq.ids_properties.comment = "IMASPy training: transform coordinate system" - - p2d = eq.time_slice[0].profiles_2d[0] - r, z = p2d.grid.dim1, p2d.grid.dim2 - r_axis = eq.time_slice[0].global_quantities.magnetic_axis.r - z_axis = eq.time_slice[0].global_quantities.magnetic_axis.z - - # Create new rho/theta coordinates - theta = np.linspace(-np.pi, np.pi, num=64, endpoint=False) - max_rho = min( - r_axis - r[0], - r[-1] - r_axis, - z_axis - z[0], - z[-1] - z_axis, - ) - rho = np.linspace(0, max_rho, num=64) - - # Calculate corresponding R/Z for interpolating the original values - rho_grid, theta_grid = np.meshgrid(rho, theta, indexing="ij", sparse=True) - grid_r = r_axis + rho_grid * np.cos(theta_grid) - grid_z = z_axis + rho_grid * np.sin(theta_grid) - interpolation_points = np.dstack((grid_r.flatten(), grid_z.flatten())) - - # 
Interpolate all data nodes on the new grid - for data_node in ["b_field_r", "b_field_z", "b_field_tor", "psi"]: - data = getattr(p2d, data_node) - interp = RegularGridInterpolator((r, z), data) - new_data = interp(interpolation_points).reshape(grid_r.shape) - setattr(p2d, data_node, new_data) - - # Update coordinate identifier - p2d.grid_type.index = 2 - p2d.grid_type.name = "inverse" - p2d.grid_type.description = "Rhopolar_polar 2D polar coordinates (rho=dim1, theta=dim2) with magnetic axis as centre of grid; theta and values following the COCOS=11 convention; the polar angle is theta=atan2(z-zaxis,r-raxis)" # noqa: E501 - - # Update coordinates - p2d.grid.dim1 = rho - p2d.grid.dim2 = theta - p2d.r = grid_r - p2d.z = grid_z - - # Finally, put the slice to disk - output_entry.put_slice(eq) - -# Create a plot to verify the transformation is correct -fig, (ax1, ax2, ax3) = plt.subplots(1, 3) - -vmin, vmax = np.min(data), np.max(data) -contour_levels = np.linspace(vmin, vmax, 32) - -rzmesh = np.meshgrid(r, z, indexing="ij") -mesh = ax1.pcolormesh(*rzmesh, data, vmin=vmin, vmax=vmax) -ax1.contour(*rzmesh, data, contour_levels, colors='black') - -ax2.pcolormesh(grid_r, grid_z, new_data, vmin=vmin, vmax=vmax) -ax2.contour(grid_r, grid_z, new_data, contour_levels, colors='black') - -rho_theta_mesh = np.meshgrid(rho, theta, indexing="ij") -ax3.pcolormesh(*rho_theta_mesh, new_data, vmin=vmin, vmax=vmax) -ax3.contour(*rho_theta_mesh, new_data, contour_levels, colors='black') - -ax1.set_xlabel("r [m]") -ax2.set_xlabel("r [m]") -ax1.set_ylabel("z [m]") -ax2.set_xlim(ax1.get_xlim()) -ax2.set_ylim(ax1.get_ylim()) -ax3.set_xlabel(r"$\rho$ [m]") -ax3.set_ylabel(r"$\theta$ [rad]") - -fig.suptitle(r"$\psi$ in ($r,z$) and ($\rho,\theta$) coordinates.") -fig.colorbar(mesh, ax=ax3) -fig.tight_layout() - -plt.show() diff --git a/docs/source/courses/basic/analyze.rst b/docs/source/courses/basic/analyze.rst index eb761580..a17fa20b 100644 --- a/docs/source/courses/basic/analyze.rst +++ 
b/docs/source/courses/basic/analyze.rst @@ -1,5 +1,5 @@ -Analyze with IMASPy -=================== +Analyze with imas-python +======================== For this part of the training we will learn to open an IMAS database entry, and plot some basic data in it using `matplotlib `_. @@ -12,26 +12,26 @@ Open an IMAS database entry IMAS explicitly separates the data on disk from the data in memory. To get started we load an existing IMAS data file from disk. The on-disk file -is represented by an :class:`imaspy.DBEntry `, which we have to -:meth:`~imaspy.db_entry.DBEntry.open()` to get a reference to the data file we +is represented by an :class:`imas.DBEntry `, which we have to +:meth:`~imas.db_entry.DBEntry.open()` to get a reference to the data file we will manipulate. The connection to the data file is kept intact until we -:meth:`~imaspy.db_entry.DBEntry.close()` the file. Note that the on-disk file -will not be changed until an explicit :meth:`~imaspy.db_entry.DBEntry.put()` or -:meth:`~imaspy.db_entry.DBEntry.put_slice()` is called. -We load data in memory with the :meth:`~imaspy.db_entry.DBEntry.get()` and -:meth:`~imaspy.db_entry.DBEntry.get_slice()` methods, after which we +:meth:`~imas.db_entry.DBEntry.close()` the file. Note that the on-disk file +will not be changed until an explicit :meth:`~imas.db_entry.DBEntry.put()` or +:meth:`~imas.db_entry.DBEntry.put_slice()` is called. +We load data in memory with the :meth:`~imas.db_entry.DBEntry.get()` and +:meth:`~imas.db_entry.DBEntry.get_slice()` methods, after which we can use the data. .. hint:: - Use the ASCII data supplied with IMASPy for all exercises. It contains two + Use the ASCII data supplied with imas-python for all exercises. It contains two IDSs (``equilibrium`` and ``core_profiles``) filled with data from three time slices of ITER reference data. Two convenience methods are available in the - :mod:`imaspy.training` module to open the DBEntry for this training data. 
+ :mod:`imas.training` module to open the DBEntry for this training data. - 1. :meth:`imaspy.training.get_training_db_entry()` returns an opened - ``imaspy.DBEntry`` object. Use this method if you want to use the IMASPy + 1. :meth:`imas.training.get_training_db_entry()` returns an opened + ``imas.DBEntry`` object. Use this method if you want to use the imas-python interface. - 2. :meth:`imaspy.training.get_training_imas_db_entry()` returns an opened + 2. :meth:`imas.training.get_training_imas_db_entry()` returns an opened ``imas.DBEntry`` object. Use this method if you want to use the Python Access Layer interface. @@ -42,10 +42,10 @@ Exercise 1 .. md-tab-item:: Exercise - Open the training database entry: ``entry = imaspy.training.get_training_db_entry()`` + Open the training database entry: ``entry = imas.training.get_training_db_entry()`` 1. Load the ``equilibrium`` IDS into memory using the - :meth:`entry.get ` method + :meth:`entry.get ` method 2. Read and print the ``time`` array of the ``equilibrium`` IDS 3. Load the ``core_profiles`` IDS into memory 4. Explore the ``core_profiles.profiles_1d`` property and try to match @@ -69,13 +69,9 @@ Exercise 1 ``core_profiles.profiles_1d[i].electrons.temperature``) from the ``core_profiles`` IDS at time slice :math:`t\approx 433\,\mathrm{s}` - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/read_whole_equilibrium.py - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/read_whole_equilibrium.py + .. literalinclude:: imas_snippets/read_whole_equilibrium.py .. caution:: When dealing with unknown data, you shouldn't blindly ``get()`` all data: @@ -84,7 +80,7 @@ Exercise 1 The recommendations for larger data files are: - Only load the time slice(s) that you are interested in. - - Alternatively, IMASPy allows to load data on-demand, see + - Alternatively, imas-python allows to load data on-demand, see :ref:`Lazy loading` for more details. 
@@ -108,28 +104,25 @@ Exercise 2 Now the index of the closest time slice can be found with :external:func:`numpy.argmin`. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/read_equilibrium_time_array.py - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/read_equilibrium_time_array.py + .. literalinclude:: imas_snippets/read_equilibrium_time_array.py .. attention:: - IMASPy objects mostly behave the same way as numpy arrays. However, in some cases - functions explicitly expect a pure numpy array and supplying an IMASPy object raises + imas-python objects mostly behave the same way as numpy arrays. However, in some cases + functions explicitly expect a pure numpy array and supplying an imas-python object raises an exception. When this is the case, the ``.value`` attribute can be used to obtain the underlying data. .. note:: - IMASPy has two main ways of accessing IDSs. In the exercises above, we used + imas-python has two main ways of accessing IDSs. In the exercises above, we used the "attribute-like" access. This is the main way of navigating the IDS tree. - However, IMASPy also provides a "dict-like" interface to access data, which + However, imas-python also provides a "dict-like" interface to access data, which might be more convenient in some cases. For example: - .. literalinclude:: imaspy_snippets/iterate_core_profiles.py + .. literalinclude:: imas_snippets/iterate_core_profiles.py Retreiving part of an IDS @@ -148,7 +141,7 @@ Retrieve a single time slice When we are interested in quantities at a single time slice (or a low number of time slices), we can decide to only load the data at specified times. This can be -accomplished with the aforementioned :meth:`~imaspy.db_entry.DBEntry.get_slice()` +accomplished with the aforementioned :meth:`~imas.db_entry.DBEntry.get_slice()` method. @@ -159,23 +152,19 @@ Exercise 3 .. 
md-tab-item:: Exercise - Use the :meth:`~imaspy.db_entry.DBEntry.get_slice()` method to obtain the electron density + Use the :meth:`~imas.db_entry.DBEntry.get_slice()` method to obtain the electron density :math:`n_e` at :math:`t\approx 433\,\mathrm{s}`. .. hint:: :collapsible: - :meth:`~imaspy.db_entry.DBEntry.get_slice()` requires an ``interpolation_method`` as one - of its arguments, here you can use ``imas.imasdef.CLOSEST_INTERP``. Alternatively, - if you use IMASPy, you can use ``imaspy.ids_defs.CLOSEST_INTERP``. + :meth:`~imas.db_entry.DBEntry.get_slice()` requires an ``interpolation_method`` as one + of its arguments, here you can use ``imas.ids_defs.CLOSEST_INTERP``. - .. md-tab-item:: AL4 - .. literalinclude:: al4_snippets/read_core_profiles_ne_timeslice.py + .. md-tab-item:: imas-python - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/read_core_profiles_ne_timeslice.py + .. literalinclude:: imas_snippets/read_core_profiles_ne_timeslice.py .. attention:: @@ -196,13 +185,9 @@ Exercise 4 Using ``matplotlib``, create a plot of :math:`n_e` on the y-axis and :math:`\rho_{tor, norm}` on the x-axis at :math:`t=433\mathrm{s}` - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/plot_core_profiles_ne_timeslice.py - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/plot_core_profiles_ne_timeslice.py + .. literalinclude:: imas_snippets/plot_core_profiles_ne_timeslice.py .. md-tab-item:: Plot @@ -220,9 +205,9 @@ When you are interested in the time evolution of a quantity, using ``get_slice`` impractical. It gets around the limitation of the data not fitting in memory, but will still need to read all of the data from disk (just not at once). -IMASPy has a `lazy loading` mode, where it will only read the requested data from disk +imas-python has a `lazy loading` mode, where it will only read the requested data from disk when you try to access it. 
You can enable it by supplying ``lazy=True`` to a call to -:meth:`~imaspy.db_entry.DBEntry.get()` or :meth:`~imaspy.db_entry.DBEntry.get_slice()`. +:meth:`~imas.db_entry.DBEntry.get()` or :meth:`~imas.db_entry.DBEntry.get_slice()`. Exercise 5 @@ -241,16 +226,16 @@ Exercise 5 the ITER cluster, you can load the following data entry with much more data, to better notice the difference that lazy loading can make:: - import imaspy - from imaspy.ids_defs import MDSPLUS_BACKEND + import imas + from imas.ids_defs import MDSPLUS_BACKEND database, pulse, run, user = "ITER", 134173, 106, "public" - data_entry = imaspy.DBEntry(MDSPLUS_BACKEND, database, pulse, run, user) + data_entry = imas.DBEntry(MDSPLUS_BACKEND, database, pulse, run, user) data_entry.open() - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/plot_core_profiles_te.py + .. literalinclude:: imas_snippets/plot_core_profiles_te.py .. md-tab-item:: Plot diff --git a/docs/source/courses/basic/create.rst b/docs/source/courses/basic/create.rst index 8fb438f0..2db00104 100644 --- a/docs/source/courses/basic/create.rst +++ b/docs/source/courses/basic/create.rst @@ -1,5 +1,5 @@ -Create with IMASPy -================== +Create with imas-python +======================= In this section of the training, we will have a look at creating (and filling) IDSs from scratch. @@ -7,8 +7,8 @@ scratch. Create an empty IDS ------------------- -Empty IDSs in IMASPy are created by the :py:meth:`~imaspy.ids_factory.IDSFactory.new` -method of an :py:class:`~imaspy.ids_factory.IDSFactory`. +Empty IDSs in imas-python are created by the :py:meth:`~imas.ids_factory.IDSFactory.new` +method of an :py:class:`~imas.ids_factory.IDSFactory`. .. note:: New IDSs can also be created by calling :code:`IDSFactory().()`, similar @@ -24,14 +24,9 @@ Exercise 1 Create an empty ``core_profiles`` IDS. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. 
literalinclude:: al4_snippets/create_core_profiles.py - :end-before: # Set properties - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :end-before: # Set properties @@ -42,7 +37,7 @@ Now we have an empty IDS, we can start filling fields. For this exercise we will populate the following fields: - ``ids_properties.homogeneous_time``, which we will set to the constant - :py:const:`~imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS`. This flags that this IDS is in + :py:const:`~imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS`. This flags that this IDS is in homogeneous time mode, meaning that all time-dependent quantities use the root ``time`` as their coordinate. - ``ids_properties.comment``, where we can describe this IDS. @@ -63,27 +58,16 @@ Exercise 2 Fill the ``core_profiles`` IDS with the fields as described above. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Set properties - :end-before: # Validate the IDS for consistency - - .. note:: - - When using the IMAS access layer instead of IMASPy, we must always create - numpy arrays (of the correct data type). IMASPy will - automatically convert your data to the appropriate numpy array. - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Set properties :end-before: # Validate the IDS for consistency .. note:: - Observe that we can assign a Python list to ``cp.time``. IMASPy will + Observe that we can assign a Python list to ``cp.time``. imas-python will automatically convert it to a numpy array. @@ -105,17 +89,9 @@ Exercise 3 Validate the just-filled IDS. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - IDS validation is an Access Layer 5 feature, and cannot be done in Al4. - - .. 
literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Validate the IDS for consistency - :end-before: # Fill in the missing rho_tor_norm coordinate - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Validate the IDS for consistency :end-before: # Fill in the missing rho_tor_norm coordinate @@ -139,15 +115,9 @@ Exercise 4 Fix the coordinate consistency error. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Fill in the missing rho_tor_norm coordinate - :end-before: # Create a new data entry for storing the IDS - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Fill in the missing rho_tor_norm coordinate :end-before: # Create a new data entry for storing the IDS @@ -157,9 +127,9 @@ Store the IDS on disk Now we have created, filled and validated an IDS, the only thing left is to store it to disk. Like loading IDSs, storing IDSs is achieved through the -:py:class:`~imaspy.db_entry.DBEntry` class. After constructing a ``DBEntry`` object, you -need to :py:meth:`~imaspy.db_entry.DBEntry.create` the data entry on-disk before you can -:py:meth:`~imaspy.db_entry.DBEntry.put` the IDS to disk. +:py:class:`~imas.db_entry.DBEntry` class. After constructing a ``DBEntry`` object, you +need to :py:meth:`~imas.db_entry.DBEntry.create` the data entry on-disk before you can +:py:meth:`~imas.db_entry.DBEntry.put` the IDS to disk. .. note:: For this exercise we will use the ASCII backend. 
Although it doesn't have the best @@ -178,30 +148,23 @@ Exercise 5 The recommended parameters for this exercise are:: - backend = imaspy.ids_defs.ASCII_BACKEND - database = "imaspy-course" + backend = imas.ids_defs.ASCII_BACKEND + database = "imas-course" pulse = 1 run = 1 - After a successful ``put``, the ids file will be created. If you are using - AL4, you should see a file ``imaspy-course_1_1_core_profiles.ids`` in your - working directory with the contents of the IDS. - If you are using AL5, this file can be found under - ``~/public/imasdb/imaspy-course/3/1/1/core_profiles.ids`` + After a successful ``put``, the IDS file will be created. + This file can be found under + ``~/public/imasdb/imas-course/3/1/1/core_profiles.ids`` .. hint:: :collapsible: - The signature of :meth:`~imaspy.db_entry.DBEntry()` is: ``DBEntry(backend, database, pulse, run)`` + The signature of :meth:`~imas.db_entry.DBEntry()` is: ``DBEntry(backend, database, pulse, run)`` - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Create a new data entry for storing the IDS - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Create a new data entry for storing the IDS Summary @@ -220,10 +183,6 @@ Congratulations for completing this section of the course. You have: Click on the tabs to see the complete source, combining all exercises. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/create_core_profiles.py - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/create_core_profiles.py + ..
literalinclude:: imas_snippets/create_core_profiles.py diff --git a/docs/source/courses/basic/explore.rst b/docs/source/courses/basic/explore.rst index 39e2e3a8..f01e23d8 100644 --- a/docs/source/courses/basic/explore.rst +++ b/docs/source/courses/basic/explore.rst @@ -1,7 +1,7 @@ .. _`basic/explore`: -Explore with IMASPy -=================== +Explore with imas-python +======================== In this part of the training, we will learn how to use Python to explore data saved in IDSs. @@ -30,25 +30,20 @@ Exercise 1 Find out the names of the available IDSs. .. hint:: - The module ``imas.ids_names`` contains information on the available IDSs in - AL4. + The module ``imas.ids_names`` contains information on the available IDSs. - In IMASPy, you can use :py:class:`~imaspy.ids_factory.IDSFactory` to figure + In imas-python, you can use :py:class:`~imas.ids_factory.IDSFactory` to figure out which IDSs are avaible. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/print_idss.py - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/print_idss.py + .. literalinclude:: imas_snippets/print_idss.py Explore the structure and contents of an IDS -------------------------------------------- -IMASPy has several features and utilities for exploring an IDS. These are best used in +imas-python has several features and utilities for exploring an IDS. These are best used in an interactive Python console, such as the default python console or the `IPython `_ console. @@ -56,22 +51,22 @@ an interactive Python console, such as the default python console or the `IPytho Tab completion '''''''''''''' -As with most Python objects, you can use :kbd:`Tab` completion on IMASPy objects. +As with most Python objects, you can use :kbd:`Tab` completion on imas-python objects. .. note:: In the python console, you need to press :kbd:`Tab` twice to show suggestions. 
-- :py:class:`~imaspy.ids_factory.IDSFactory` has tab completion for IDS names: +- :py:class:`~imas.ids_factory.IDSFactory` has tab completion for IDS names: .. code-block:: pycon - >>> factory = imaspy.IDSFactory() + >>> factory = imas.IDSFactory() >>> factory.core_ factory.core_instant_changes( factory.core_sources( factory.core_profiles( factory.core_transport( -- :py:class:`~imaspy.ids_toplevel.IDSToplevel` and - :py:class:`~imaspy.ids_structure.IDSStructure` have tab completion for child nodes: +- :py:class:`~imas.ids_toplevel.IDSToplevel` and + :py:class:`~imas.ids_structure.IDSStructure` have tab completion for child nodes: .. image:: interactive_tab_core_profiles_toplevel.png @@ -79,25 +74,25 @@ As with most Python objects, you can use :kbd:`Tab` completion on IMASPy objects Interactive help '''''''''''''''' -Use the built-in :external:py:func:`help()` function to get more information on IMASPy +Use the built-in :external:py:func:`help()` function to get more information on imas-python functions, objects, etc. .. code-block:: pycon - >>> import imaspy - >>> help(imaspy.DBEntry) - Help on class DBEntry in module imaspy.db_entry: + >>> import imas + >>> help(imas.DBEntry) + Help on class DBEntry in module imas.db_entry: class DBEntry(builtins.object) [...] -Inspecting IMASPy objects -''''''''''''''''''''''''' +Inspecting imas-python objects +'''''''''''''''''''''''''''''' :kbd:`Tab` completion is nice when you already know more or less what attribute you are -looking for. For a more comprehensive overview of any IMASPy node, you can use -:py:meth:`imaspy.util.inspect` to show: +looking for. For a more comprehensive overview of any imas-python node, you can use +:py:meth:`imas.util.inspect` to show: 1. The path to the node (relative to the IDS it is contained in) 2. The Data Dictionary version @@ -108,7 +103,7 @@ looking for. For a more comprehensive overview of any IMASPy node, you can use .. 
hint:: - The output of :py:meth:`imaspy.util.inspect` is colored when your terminal supports + The output of :py:meth:`imas.util.inspect` is colored when your terminal supports it. You may use the environment variable ``NO_COLOR`` to disable colored output or ``FORCE_COLOR`` to force colored output. See ``_. @@ -118,13 +113,13 @@ looking for. For a more comprehensive overview of any IMASPy node, you can use .. rubric:: Examples -.. image:: imaspy_inspect.png +.. image:: imas_inspect.png Printing an IDS tree '''''''''''''''''''' -Another useful utility function in IMASPy is :py:meth:`imaspy.util.print_tree`. This +Another useful utility function in imas-python is :py:meth:`imas.util.print_tree`. This will print a complete tree structure of all non-empty quantities in the provided node. As an argument you can give a complete IDS, or any structure in the IDS such as ``ids_properties``: @@ -135,26 +130,26 @@ As an argument you can give a complete IDS, or any structure in the IDS such as Depending on the size of the IDS (structure) you print, this may generate a lot of output. For interactive exploration of large IDSs we recommend to use - :py:meth:`imaspy.util.inspect` (optionally with the parameter ``hide_empty_nodes`` - set to :code:`True`) and only use :py:meth:`imaspy.util.print_tree` for smaller + :py:meth:`imas.util.inspect` (optionally with the parameter ``hide_empty_nodes`` + set to :code:`True`) and only use :py:meth:`imas.util.print_tree` for smaller sub-structures. Find paths in an IDS '''''''''''''''''''' -In IMASPy you can also search for paths inside an IDS: -:py:meth:`imaspy.util.find_paths`. This can be useful when you know what quantity you +In imas-python you can also search for paths inside an IDS: +:py:meth:`imas.util.find_paths`. This can be useful when you know what quantity you are looking for, but aren't sure exactly in which (sub)structure of the IDS it is located. 
-:py:meth:`imaspy.util.find_paths` accepts any Python regular expression (see +:py:meth:`imas.util.find_paths` accepts any Python regular expression (see :external:py:mod:`re`) as input. This allows for anything from basic to advanced searches. .. rubric:: Examples -.. literalinclude:: imaspy_snippets/find_paths.py +.. literalinclude:: imas_snippets/find_paths.py Exercise 2 @@ -188,12 +183,12 @@ Exercise 2 .. md-tab-item:: Training data - .. literalinclude:: imaspy_snippets/explore_training_data.py + .. literalinclude:: imas_snippets/explore_training_data.py .. md-tab-item:: `pf_active` data - .. literalinclude:: imaspy_snippets/explore_public_pf_active.py + .. literalinclude:: imas_snippets/explore_public_pf_active.py .. md-tab-item:: `ec_launchers` data - .. literalinclude:: imaspy_snippets/explore_public_ec_launchers.py \ No newline at end of file + .. literalinclude:: imas_snippets/explore_public_ec_launchers.py \ No newline at end of file diff --git a/docs/source/courses/basic/imaspy_inspect.png b/docs/source/courses/basic/imas_inspect.png similarity index 100% rename from docs/source/courses/basic/imaspy_inspect.png rename to docs/source/courses/basic/imas_inspect.png diff --git a/docs/source/courses/basic/imaspy_snippets/create_core_profiles.py b/docs/source/courses/basic/imas_snippets/create_core_profiles.py similarity index 73% rename from docs/source/courses/basic/imaspy_snippets/create_core_profiles.py rename to docs/source/courses/basic/imas_snippets/create_core_profiles.py index 277e0c5e..b263299c 100644 --- a/docs/source/courses/basic/imaspy_snippets/create_core_profiles.py +++ b/docs/source/courses/basic/imas_snippets/create_core_profiles.py @@ -1,17 +1,17 @@ import datetime -import imaspy +import imas import numpy as np -factory = imaspy.IDSFactory() +factory = imas.IDSFactory() cp = factory.new("core_profiles") # Alternative cp = factory.core_profiles() # Set properties -cp.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS 
-cp.ids_properties.comment = "Synthetic IDS created for the IMASPy course" +cp.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS +cp.ids_properties.comment = "Synthetic IDS created for the imas-python course" cp.ids_properties.creation_date = datetime.date.today().isoformat() # Set a time array @@ -32,7 +32,7 @@ try: cp.validate() print("IDS is valid!") -except imaspy.exception.ValidationError as exc: +except imas.exception.ValidationError as exc: print("Oops, the IDS is not valid: ", exc) # Fill in the missing rho_tor_norm coordinate @@ -42,8 +42,8 @@ cp.validate() # Create a new data entry for storing the IDS -pulse, run, database = 1, 1, "imaspy-course" -entry = imaspy.DBEntry(imaspy.ids_defs.ASCII_BACKEND, database, pulse, run) +pulse, run, database = 1, 1, "imas-course" +entry = imas.DBEntry(imas.ids_defs.ASCII_BACKEND, database, pulse, run) entry.create() entry.put(cp) diff --git a/docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py b/docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py new file mode 100644 index 00000000..e5b61553 --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py @@ -0,0 +1,15 @@ +import imas.util + +# Open input data entry +entry = imas.DBEntry( + imas.ids_defs.HDF5_BACKEND, "ITER_MD", 120000, 204, "public", data_version="3" +) +entry.open() + +# Get the ec_launchers IDS +pf = entry.get("ec_launchers") + +# Inspect the IDS +imas.util.inspect(pf, hide_empty_nodes=True) + +entry.close() diff --git a/docs/source/courses/basic/imas_snippets/explore_public_pf_active.py b/docs/source/courses/basic/imas_snippets/explore_public_pf_active.py new file mode 100644 index 00000000..65b2fc21 --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/explore_public_pf_active.py @@ -0,0 +1,15 @@ +import imas.util + +# Open input data entry +entry = imas.DBEntry( + imas.ids_defs.HDF5_BACKEND, "ITER_MD", 111001, 103, "public", data_version="3" +) 
+entry.open() + +# Get the pf_active IDS +pf = entry.get("pf_active") + +# Inspect the IDS +imas.util.inspect(pf, hide_empty_nodes=True) + +entry.close() diff --git a/docs/source/courses/basic/imas_snippets/explore_training_data.py b/docs/source/courses/basic/imas_snippets/explore_training_data.py new file mode 100644 index 00000000..e60279af --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/explore_training_data.py @@ -0,0 +1,13 @@ +import imas.util +import imas.training + +# Open input data entry +entry = imas.training.get_training_db_entry() + +# Get the core_profiles IDS +cp = entry.get("core_profiles") + +# Inspect the IDS +imas.util.inspect(cp, hide_empty_nodes=True) + +entry.close() \ No newline at end of file diff --git a/docs/source/courses/basic/imas_snippets/find_paths.py b/docs/source/courses/basic/imas_snippets/find_paths.py new file mode 100644 index 00000000..e70c6c6d --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/find_paths.py @@ -0,0 +1,16 @@ +import imas.util + +factory = imas.IDSFactory() +core_profiles = factory.core_profiles() + +print("Paths containing `rho`:") +print(imas.util.find_paths(core_profiles, "rho")) +print() + +print("Paths containing `rho`, not followed by `error`:") +print(imas.util.find_paths(core_profiles, "rho(?!.*error)")) +print() + +print("All paths ending with `time`:") +print(imas.util.find_paths(core_profiles, "time$")) +print() diff --git a/docs/source/courses/basic/imaspy_snippets/iterate_core_profiles.py b/docs/source/courses/basic/imas_snippets/iterate_core_profiles.py similarity index 77% rename from docs/source/courses/basic/imaspy_snippets/iterate_core_profiles.py rename to docs/source/courses/basic/imas_snippets/iterate_core_profiles.py index bfbd1860..9d315027 100644 --- a/docs/source/courses/basic/imaspy_snippets/iterate_core_profiles.py +++ b/docs/source/courses/basic/imas_snippets/iterate_core_profiles.py @@ -1,7 +1,7 @@ -import imaspy.training +import imas.training # Open input data 
entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") for el in ["profiles_1d", "global_quantities", "code"]: diff --git a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_ne_timeslice.py b/docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py similarity index 83% rename from docs/source/courses/basic/imaspy_snippets/plot_core_profiles_ne_timeslice.py rename to docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py index 2df9ef8e..8c2beee5 100644 --- a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_ne_timeslice.py +++ b/docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py @@ -1,7 +1,7 @@ import os import matplotlib -import imaspy.training +import imas.training # To avoid possible display issues when Matplotlib uses a non-GUI backend if "DISPLAY" not in os.environ: @@ -12,12 +12,12 @@ import matplotlib.pyplot as plt # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Read n_e profile and the associated normalised toroidal flux coordinate at t = 443 # seconds -cp = entry.get_slice("core_profiles", t, imaspy.ids_defs.CLOSEST_INTERP) +cp = entry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) # profiles_1d should only contain the requested slice assert len(cp.profiles_1d) == 1 diff --git a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_te.py b/docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py similarity index 92% rename from docs/source/courses/basic/imaspy_snippets/plot_core_profiles_te.py rename to docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py index f620158e..44ac96c3 100644 --- a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_te.py +++ b/docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py @@ -11,11 +11,11 @@ from matplotlib import pyplot as plt 
-import imaspy -from imaspy.ids_defs import MDSPLUS_BACKEND +import imas +from imas.ids_defs import MDSPLUS_BACKEND database, pulse, run, user = "ITER", 134173, 106, "public" -data_entry = imaspy.DBEntry( +data_entry = imas.DBEntry( MDSPLUS_BACKEND, database, pulse, run, user, data_version="3" ) data_entry.open() diff --git a/docs/source/courses/basic/imaspy_snippets/print_idss.py b/docs/source/courses/basic/imas_snippets/print_idss.py similarity index 71% rename from docs/source/courses/basic/imaspy_snippets/print_idss.py rename to docs/source/courses/basic/imas_snippets/print_idss.py index 1811398f..5cb3f1cc 100644 --- a/docs/source/courses/basic/imaspy_snippets/print_idss.py +++ b/docs/source/courses/basic/imas_snippets/print_idss.py @@ -1,10 +1,10 @@ -import imaspy +import imas -# IMASPy has multiple DD versions inside, which makes this exercise harder. +# imas-python has multiple DD versions inside, which makes this exercise harder. # We provide possible solutions here # Option 1: Print the IDSs in the default-selected DD version -factory = imaspy.IDSFactory() +factory = imas.IDSFactory() print("IDSs available in DD version", factory.version) print(factory.ids_names()) @@ -14,6 +14,6 @@ print() # Option 2: Print the IDSs in a specific DD version -factory = imaspy.IDSFactory("3.39.0") +factory = imas.IDSFactory("3.39.0") print("IDSs available in DD version", factory.version) print(list(factory)) diff --git a/docs/source/courses/basic/imaspy_snippets/read_core_profiles_ne_timeslice.py b/docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py similarity index 71% rename from docs/source/courses/basic/imaspy_snippets/read_core_profiles_ne_timeslice.py rename to docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py index 4818f841..d54ac922 100644 --- a/docs/source/courses/basic/imaspy_snippets/read_core_profiles_ne_timeslice.py +++ b/docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py @@ -1,12 +1,12 @@ 
-import imaspy.training +import imas.training # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Read n_e profile and the associated normalised toroidal flux coordinate at t = 443 # seconds -cp = entry.get_slice("core_profiles", t, imaspy.ids_defs.CLOSEST_INTERP) +cp = entry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) # profiles_1d should only contain the requested slice assert len(cp.profiles_1d) == 1 diff --git a/docs/source/courses/basic/imaspy_snippets/read_equilibrium_time_array.py b/docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py similarity index 88% rename from docs/source/courses/basic/imaspy_snippets/read_equilibrium_time_array.py rename to docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py index eb1edae3..8fe482e9 100644 --- a/docs/source/courses/basic/imaspy_snippets/read_equilibrium_time_array.py +++ b/docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py @@ -1,5 +1,5 @@ import numpy as np -import imaspy.training +import imas.training # Find nearest value and index in an array @@ -10,7 +10,7 @@ def find_nearest(a, a0): # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Read the time array from the equilibrium IDS eq = entry.get("equilibrium") diff --git a/docs/source/courses/basic/imaspy_snippets/read_whole_equilibrium.py b/docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py similarity index 90% rename from docs/source/courses/basic/imaspy_snippets/read_whole_equilibrium.py rename to docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py index 3d1a1f3a..c70c73cc 100644 --- a/docs/source/courses/basic/imaspy_snippets/read_whole_equilibrium.py +++ b/docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py @@ -1,7 +1,7 @@ -import imaspy.training +import imas.training # Open input data entry -entry = 
imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # 1. Read and print the time of the equilibrium IDS for the whole scenario # This explicitly converts the data from the old DD version on disk, to the diff --git a/docs/source/courses/basic/imaspy_snippets/transform_grid.py b/docs/source/courses/basic/imas_snippets/transform_grid.py similarity index 90% rename from docs/source/courses/basic/imaspy_snippets/transform_grid.py rename to docs/source/courses/basic/imas_snippets/transform_grid.py index a2a56a20..f4cef866 100644 --- a/docs/source/courses/basic/imaspy_snippets/transform_grid.py +++ b/docs/source/courses/basic/imas_snippets/transform_grid.py @@ -4,7 +4,7 @@ import numpy as np from scipy.interpolate import RegularGridInterpolator -import imaspy.training +import imas.training if "DISPLAY" not in os.environ: matplotlib.use("agg") @@ -14,22 +14,22 @@ import matplotlib.pyplot as plt # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Lazy-loaded input equilibrium eq_in = entry.get("equilibrium", lazy=True) input_times = eq_in.time # Create output data entry -output_entry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "imaspy-course", 2, 1) +output_entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "imas-course", 2, 1) output_entry.create() # Loop over each time slice for time in input_times: - eq = entry.get_slice("equilibrium", time, imaspy.ids_defs.CLOSEST_INTERP) + eq = entry.get_slice("equilibrium", time, imas.ids_defs.CLOSEST_INTERP) # Update comment - eq.ids_properties.comment = "IMASPy training: transform coordinate system" + eq.ids_properties.comment = "imas-python training: transform coordinate system" p2d = eq.time_slice[0].profiles_2d[0] # Get `.value` so we can plot the original values after the IDS node is overwritten diff --git a/docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py 
b/docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py deleted file mode 100644 index 425e2260..00000000 --- a/docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py +++ /dev/null @@ -1,15 +0,0 @@ -import imaspy.util - -# Open input data entry -entry = imaspy.DBEntry( - imaspy.ids_defs.HDF5_BACKEND, "ITER_MD", 120000, 204, "public", data_version="3" -) -entry.open() - -# Get the ec_launchers IDS -pf = entry.get("ec_launchers") - -# Inspect the IDS -imaspy.util.inspect(pf, hide_empty_nodes=True) - -entry.close() diff --git a/docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py b/docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py deleted file mode 100644 index c8acbb39..00000000 --- a/docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py +++ /dev/null @@ -1,15 +0,0 @@ -import imaspy.util - -# Open input data entry -entry = imaspy.DBEntry( - imaspy.ids_defs.HDF5_BACKEND, "ITER_MD", 111001, 103, "public", data_version="3" -) -entry.open() - -# Get the pf_active IDS -pf = entry.get("pf_active") - -# Inspect the IDS -imaspy.util.inspect(pf, hide_empty_nodes=True) - -entry.close() diff --git a/docs/source/courses/basic/imaspy_snippets/explore_training_data.py b/docs/source/courses/basic/imaspy_snippets/explore_training_data.py deleted file mode 100644 index 68a16141..00000000 --- a/docs/source/courses/basic/imaspy_snippets/explore_training_data.py +++ /dev/null @@ -1,13 +0,0 @@ -import imaspy.util -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_db_entry() - -# Get the core_profiles IDS -cp = entry.get("core_profiles") - -# Inspect the IDS -imaspy.util.inspect(cp, hide_empty_nodes=True) - -entry.close() \ No newline at end of file diff --git a/docs/source/courses/basic/imaspy_snippets/find_paths.py b/docs/source/courses/basic/imaspy_snippets/find_paths.py deleted file mode 100644 index 937ebd09..00000000 --- 
a/docs/source/courses/basic/imaspy_snippets/find_paths.py +++ /dev/null @@ -1,16 +0,0 @@ -import imaspy.util - -factory = imaspy.IDSFactory() -core_profiles = factory.core_profiles() - -print("Paths containing `rho`:") -print(imaspy.util.find_paths(core_profiles, "rho")) -print() - -print("Paths containing `rho`, not followed by `error`:") -print(imaspy.util.find_paths(core_profiles, "rho(?!.*error)")) -print() - -print("All paths ending with `time`:") -print(imaspy.util.find_paths(core_profiles, "time$")) -print() diff --git a/docs/source/courses/basic/setup.rst b/docs/source/courses/basic/setup.rst index f37bf4ce..3034cf76 100644 --- a/docs/source/courses/basic/setup.rst +++ b/docs/source/courses/basic/setup.rst @@ -1,13 +1,12 @@ -IMASPy 101: setup IMASPy -======================== +imas-python 101: setup imas-python +================================== -This course was written for IMASPy version 0.8.0 and requires an IMAS installation to -load IMAS data. IMASPy may be installed on your cluster, in which case you can do +This course was written for imas-python version 0.8.0 and requires an IMAS installation to +load IMAS data. imas-python may be installed on your cluster, in which case you can do .. code-block:: console - $ module load IMASPy IMAS - $ python -c 'import imaspy; print(imaspy.__version__)' - 0.8.0 + $ module load imas-python IMAS + $ python -c 'import imas; print(imas.__version__)' -Have a look at the :ref:`Installing IMASPy` page for more details on installing IMASPy. +Have a look at the :ref:`Installing imas-python` page for more details on installing imas-python. 
diff --git a/docs/source/courses/basic/transform.rst b/docs/source/courses/basic/transform.rst index 8ff0a647..6f208443 100644 --- a/docs/source/courses/basic/transform.rst +++ b/docs/source/courses/basic/transform.rst @@ -1,5 +1,5 @@ -Transform with IMASPy -===================== +Transform with imas-python +========================== In this part of the course we'll perform a coordinate transformation. Our input data is in rectilinear :math:`R, Z` coordinates, which we will transform into poloidal polar @@ -27,15 +27,9 @@ Exercise 1: Check which time slices exist .. hint:: You can use :ref:`lazy loading` to avoid loading all data in memory. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Open input data entry - :end-before: # Create output data entry - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Open input data entry :end-before: # Create output data entry @@ -50,15 +44,9 @@ Exercise 2: Load a time slice Loop over each available time in the IDS and load the time slice inside the loop. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Loop over each time slice - :end-before: # Update comment - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Loop over each time slice :end-before: # Update comment @@ -86,15 +74,9 @@ We will apply the transformation of the data as follows: .. md-tab-set:: - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Loop over each time slice - :end-before: # Finally, put the slice to disk - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. 
literalinclude:: imas_snippets/transform_grid.py :start-at: # Loop over each time slice :end-before: # Finally, put the slice to disk @@ -108,26 +90,14 @@ Exercise 4: Store a time slice Store the time slice after the transformation. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Create output data entry :end-at: output_entry.create() :caption: The data entry is created once, outside the time slice loop - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Finally, put the slice to disk - :end-at: output_entry.put_slice - :caption: Store the time slice inside the loop - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py - :start-at: # Create output data entry - :end-at: output_entry.create() - :caption: The data entry is created once, outside the time slice loop - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Finally, put the slice to disk :end-at: output_entry.put_slice :caption: Store the time slice inside the loop @@ -144,14 +114,9 @@ Exercise 5: Plotting data before and after the transformation :math:`\rho,\theta` plane (transformed data) to verify that the transformation is correct. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Create a plot - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Create a plot @@ -160,12 +125,7 @@ Bringing it all together .. md-tab-set:: - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/transform_grid.py - :caption: Source code for the complete exercise - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/transform_grid.py + .. 
literalinclude:: imas_snippets/transform_grid.py :caption: Source code for the complete exercise diff --git a/docs/source/courses/basic_user_training.rst b/docs/source/courses/basic_user_training.rst index f70be9ee..a3703462 100644 --- a/docs/source/courses/basic_user_training.rst +++ b/docs/source/courses/basic_user_training.rst @@ -1,18 +1,12 @@ -.. _`IMASPy 101`: +.. _`imas-python 101`: -IMASPy 101 -========== +imas-python 101 +=============== -In this IMASPy training, we introduce you to the basic concepts and features of -IMASPy. You will need some basic familiarity with Python. For a refresher, see +In this imas-python training, we introduce you to the basic concepts and features of +imas-python. You will need some basic familiarity with Python. For a refresher, see the `Python tutorial `_. We also assume -some basic knowledge of the ITER IMAS infrastructure, see -https://imas.iter.org. To get access to the nessecary knowledge and tooling -to start using IMASPy, including an already installed IMASPy, follow the excellent -`Getting Started `_ -until "How to read / write IMAS data", after which you can follow the IMASPy -specific guide below. - +some basic knowledge of the ITER IMAS infrastructure. .. toctree:: :caption: Training contents diff --git a/docs/source/identifiers.rst b/docs/source/identifiers.rst index 661e7fb2..de885a3d 100644 --- a/docs/source/identifiers.rst +++ b/docs/source/identifiers.rst @@ -20,25 +20,25 @@ representations: 3. A description (long string) -Identifiers in IMASPy ---------------------- +Identifiers in imas-python +-------------------------- -IMASPy implements identifiers as an :py:class:`enum.Enum`. Identifiers are +imas-python implements identifiers as an :py:class:`enum.Enum`. Identifiers are constructed on-demand from the loaded Data Dictionary definitions. -All identifier enums can be accessed through ``imaspy.identifiers``. A list of -the available identifiers is stored as ``imaspy.identifiers.identifiers``. 
+All identifier enums can be accessed through ``imas.identifiers``. A list of +the available identifiers is stored as ``imas.identifiers.identifiers``. .. code-block:: python :caption: Accessing identifiers - import imaspy + import imas # List all identifier names - for identifier_name in imaspy.identifiers.identifiers: + for identifier_name in imas.identifiers.identifiers: print(identifier_name) # Get a specific identifier - csid = imaspy.identifiers.core_source_identifier + csid = imas.identifiers.core_source_identifier # Get and print information of an identifier value print(csid.total) print(csid.total.index) @@ -47,23 +47,23 @@ the available identifiers is stored as ``imaspy.identifiers.identifiers``. # Item access is also possible print(identifiers["edge_source_identifier"]) - # You can use imaspy.util.inspect to list all options - imaspy.util.inspect(identifiers.ggd_identifier) + # You can use imas.util.inspect to list all options + imas.util.inspect(identifiers.ggd_identifier) # And also to get more details of a specific option - imaspy.util.inspect(identifiers.ggd_identifier.SN) + imas.util.inspect(identifiers.ggd_identifier.SN) # When an IDS node is an identifier, you can use # metadata.identifier_enum to get the identifier - core_sources = imaspy.IDSFactory().core_sources() + core_sources = imas.IDSFactory().core_sources() core_sources.source.resize(1) print(core_sources.source[0].identifier.metadata.identifier_enum) -Assigning identifiers in IMASPy -------------------------------- +Assigning identifiers in imas-python +------------------------------------ -IMASPy implements smart assignment of identifiers. You may assign an identifier -enum value (for example ``imaspy.identifiers.core_source_identifier.total``), a +imas-python implements smart assignment of identifiers. 
You may assign an identifier +enum value (for example ``imas.identifiers.core_source_identifier.total``), a string (for example ``"total"``) or an integer (for example ``"1"``) to an identifier structure (for example ``core_profiles.source[0].identifier``) to set all three child nodes ``name``, ``index`` and ``description`` in one go. See @@ -72,12 +72,12 @@ below example: .. code-block:: python :caption: Assigning identifiers - import imaspy + import imas - core_sources = imaspy.IDSFactory().core_sources() + core_sources = imas.IDSFactory().core_sources() core_sources.source.resize(2) - csid = imaspy.identifiers.core_source_identifier + csid = imas.identifiers.core_source_identifier # We can set the identifier in three ways: # 1. Assign an instance of the identifier enum: core_sources.source[0].identifier = csid.total @@ -87,18 +87,18 @@ below example: core_sources.source[0].identifier = 1 # Inspect the contents of the structure - imaspy.util.inspect(core_sources.source[0].identifier) + imas.util.inspect(core_sources.source[0].identifier) # You can still assign any value to the individual name / index / # description nodes: core_sources.source[1].identifier.name = "total" # Only name is set, index and description are empty - imaspy.util.inspect(core_sources.source[1].identifier) + imas.util.inspect(core_sources.source[1].identifier) # This also allows to use not-yet-standardized identifier values core_sources.source[1].identifier.name = "my_custom_identifier" core_sources.source[1].identifier.index = -1 core_sources.source[1].identifier.description = "My custom identifier" - imaspy.util.inspect(core_sources.source[1].identifier) + imas.util.inspect(core_sources.source[1].identifier) Compare identifiers @@ -117,9 +117,9 @@ the Data Dictionary description: .. 
code-block:: python :caption: Comparing identifiers - >>> import imaspy - >>> csid = imaspy.identifiers.core_source_identifier - >>> core_sources = imaspy.IDSFactory().core_sources() + >>> import imas + >>> csid = imas.identifiers.core_source_identifier + >>> core_sources = imas.IDSFactory().core_sources() >>> core_sources.source.resize(1) >>> core_sources.source[0].identifier.index = 1 >>> # Compares equal to csid.total, though name and description are empty @@ -143,8 +143,8 @@ the Data Dictionary description: .. seealso:: - - :py:class:`imaspy.ids_identifiers.IDSIdentifier`: which is the base class + - :py:class:`imas.ids_identifiers.IDSIdentifier`: which is the base class of all identifier enumerations. - - :py:data:`imaspy.ids_identifiers.identifiers`: identifier accessor. - - :py:attr:`imaspy.ids_metadata.IDSMetadata.identifier_enum`: get the + - :py:data:`imas.ids_identifiers.identifiers`: identifier accessor. + - :py:attr:`imas.ids_metadata.IDSMetadata.identifier_enum`: get the identifier enum from an IDS node. diff --git a/docs/source/imaspy_architecture.rst b/docs/source/imas_architecture.rst similarity index 58% rename from docs/source/imaspy_architecture.rst rename to docs/source/imas_architecture.rst index 90c0f0ed..bfcab45d 100644 --- a/docs/source/imaspy_architecture.rst +++ b/docs/source/imas_architecture.rst @@ -1,7 +1,7 @@ -IMASPy Architecture -=================== +imas-python architecture +======================== -This document provides a brief overview of the components of IMASPy, grouped into +This document provides a brief overview of the components of imas-python, grouped into different functional areas. We don't aim to give detailed explanations of the code or the algorithms in it. These @@ -15,53 +15,53 @@ These classes are used to parse and represent IDS metadata from the Data Diction Metadata objects are generated from a Data Dictionary XML and are (supposed to be) immutable. 
-- :py:mod:`imaspy.ids_metadata` contains the main metadata class - :py:class:`~imaspy.ids_metadata.IDSMetadata`. This class is generated from an +- :py:mod:`imas.ids_metadata` contains the main metadata class + :py:class:`~imas.ids_metadata.IDSMetadata`. This class is generated from an ```` or ```` element in the Data Dictionary XML and contains all (parsed) data belonging to that ```` or ````. Most of the (Python) attributes correspond directly to an attribute of the XML element. - This module also contains the :py:class:`~imaspy.ids_metadata.IDSType` enum. This + This module also contains the :py:class:`~imas.ids_metadata.IDSType` enum. This enum corresponds to the Data Dictionary notion of ``type`` which can be ``dynamic``, ``constant``, ``static`` or unavailable on a Data Dictionary element. -- :py:mod:`imaspy.ids_coordinates` contains two classes: - :py:class:`~imaspy.ids_coordinates.IDSCoordinate`, which handles the parsing of +- :py:mod:`imas.ids_coordinates` contains two classes: + :py:class:`~imas.ids_coordinates.IDSCoordinate`, which handles the parsing of coordinate identifiers from the Data Dictionary, and - :py:class:`~imaspy.ids_coordinates.IDSCoordinates`, which handles coordinate + :py:class:`~imas.ids_coordinates.IDSCoordinates`, which handles coordinate retrieval and validation of IDS nodes. - :py:class:`~imaspy.ids_coordinates.IDSCoordinate`\ s are created for each coordinate + :py:class:`~imas.ids_coordinates.IDSCoordinate`\ s are created for each coordinate attribute of a Data Dictionary element: ``coordinate1``, ``coordinate2``, ... ``coordinate1_same_as``, etc. - :py:class:`~imaspy.ids_coordinates.IDSCoordinates` is created and assigned as - ``coordinates`` attribute of :py:class:`~imaspy.ids_struct_array.IDSStructArray` and - :py:class:`~imaspy.ids_primitive.IDSPrimitive` objects. 
This class is responsible + :py:class:`~imas.ids_coordinates.IDSCoordinates` is created and assigned as + ``coordinates`` attribute of :py:class:`~imas.ids_struct_array.IDSStructArray` and + :py:class:`~imas.ids_primitive.IDSPrimitive` objects. This class is responsible for retrieving coordinate values and for checking the coordinate consistency in - :py:func:`~imaspy.ids_toplevel.IDSToplevel.validate`. + :py:func:`~imas.ids_toplevel.IDSToplevel.validate`. -- :py:mod:`imaspy.ids_data_type` handles parsing Data Dictionary ``data_type`` - attributes (see method :py:meth:`~imaspy.ids_data_type.IDSDataType.parse`) to an - :py:class:`~imaspy.ids_data_type.IDSDataType` and number of dimensions. +- :py:mod:`imas.ids_data_type` handles parsing Data Dictionary ``data_type`` + attributes (see method :py:meth:`~imas.ids_data_type.IDSDataType.parse`) to an + :py:class:`~imas.ids_data_type.IDSDataType` and number of dimensions. - :py:class:`~imaspy.ids_data_type.IDSDataType` also has attributes for default values + :py:class:`~imas.ids_data_type.IDSDataType` also has attributes for default values and mappings to Python / Numpy / Access Layer type identifiers. -- :py:mod:`imaspy.ids_path` handles parsing of IDS paths to - :py:class:`~imaspy.ids_path.IDSPath` objects. Paths can occur as the ``path`` +- :py:mod:`imas.ids_path` handles parsing of IDS paths to + :py:class:`~imas.ids_path.IDSPath` objects. Paths can occur as the ``path`` attribute of Data Dictionary elements, and inside coordinate identifiers. .. caution:: - Although an :py:class:`~imaspy.ids_path.IDSPath` in IMASPy implements roughly + Although an :py:class:`~imas.ids_path.IDSPath` in imas-python implements roughly the same concept as `the "IDS Path syntax" in the Data Dictionary - `__, + `__, they are not necessarily the same thing! At the moment of writing this (January 2024), the IDS path definition in the Data Dictionary is not yet finalized. 
- Be aware that the syntax of IMASPy's :py:class:`~imaspy.ids_path.IDSPath` may + Be aware that the syntax of imas-python's :py:class:`~imas.ids_path.IDSPath` may differ slightly and might be incompatible with the definition from the Data Dictionary. @@ -72,22 +72,22 @@ Data Dictionary building and loading The following submodules are responsible for building the Data Dictionary and loading DD definitions at runtime. -- :py:mod:`imaspy.dd_helpers` handles building the ``IDSDef.zip`` file, containing all +- :py:mod:`imas.dd_helpers` handles building the ``IDSDef.zip`` file, containing all versions of the Data Dictionary since ``3.22.0``. -- :py:mod:`imaspy.dd_zip` handles loading the Data Dictionary definitions at run time. +- :py:mod:`imas.dd_zip` handles loading the Data Dictionary definitions at run time. These definitions can be loaded from an ``IDSDef.zip`` or from a custom XML file. -.. _imaspy_architecture/IDS_nodes: +.. _imas_architecture/IDS_nodes: IDS nodes --------- The following submodules and classes represent IDS nodes. -- :py:mod:`imaspy.ids_base` defines the base class for all IDS nodes: - :py:class:`~imaspy.ids_base.IDSBase`. This class is an abstract class and shouldn't +- :py:mod:`imas.ids_base` defines the base class for all IDS nodes: + :py:class:`~imas.ids_base.IDSBase`. This class is an abstract class and shouldn't be instantiated directly. Several useful properties are defined in this class, which are therefore available @@ -98,9 +98,9 @@ The following submodules and classes represent IDS nodes. .. code-block:: python - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles._parent - + >>> core_profiles.ids_properties._parent >>> core_profiles.ids_properties.homogeneous_time._parent @@ -116,9 +116,9 @@ The following submodules and classes represent IDS nodes. .. 
code-block:: python - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles._dd_parent - + >>> core_profiles.ids_properties._dd_parent >>> core_profiles.ids_properties.homogeneous_time._dd_parent @@ -134,52 +134,52 @@ The following submodules and classes represent IDS nodes. indices. - ``_lazy`` indicates if the IDS is lazy loaded. - ``_version`` is the Data Dictionary version of this node. - - ``_toplevel`` is a shortcut to the :py:class:`~imaspy.ids_toplevel.IDSToplevel` + - ``_toplevel`` is a shortcut to the :py:class:`~imas.ids_toplevel.IDSToplevel` element that this node is a decendent of. -- :py:mod:`imaspy.ids_primitive` contains all data node classes, which are child - classes of :py:class:`~imaspy.ids_primitive.IDSPrimitive`. ``IDSPrimitive`` +- :py:mod:`imas.ids_primitive` contains all data node classes, which are child + classes of :py:class:`~imas.ids_primitive.IDSPrimitive`. ``IDSPrimitive`` implements all functionality that is common for every data type, whereas the classes in below list are specific per data type. Assignment-time data type checking is handled by the setter of the - :py:attr:`~imaspy.ids_primitive.IDSPrimitive.value` property and the ``_cast_value`` + :py:attr:`~imas.ids_primitive.IDSPrimitive.value` property and the ``_cast_value`` methods on each of the type specialization classes. - - :py:class:`~imaspy.ids_primitive.IDSString0D` is the type specialization for 0D + - :py:class:`~imas.ids_primitive.IDSString0D` is the type specialization for 0D strings. It can be used as if it is a python :external:py:class:`str` object. - - :py:class:`~imaspy.ids_primitive.IDSString1D` is the type specialization for 1D + - :py:class:`~imas.ids_primitive.IDSString1D` is the type specialization for 1D strings. It behaves as if it is a python :external:py:class:`list` of :external:py:class:`str`. 
- - :py:class:`~imaspy.ids_primitive.IDSNumeric0D` is the base class for 0D + - :py:class:`~imas.ids_primitive.IDSNumeric0D` is the base class for 0D numerical types: - - :py:class:`~imaspy.ids_primitive.IDSComplex0D` is the type specialization + - :py:class:`~imas.ids_primitive.IDSComplex0D` is the type specialization for 0D complex numbers. It can be used as if it is a python :external:py:class:`complex`. - - :py:class:`~imaspy.ids_primitive.IDSFloat0D` is the type specialization + - :py:class:`~imas.ids_primitive.IDSFloat0D` is the type specialization for 0D floating point numbers. It can be used as if it is a python :external:py:class:`float`. - - :py:class:`~imaspy.ids_primitive.IDSInt0D` is the type specialization + - :py:class:`~imas.ids_primitive.IDSInt0D` is the type specialization for 0D whole numbers. It can be used as if it is a python :external:py:class:`int`. - - :py:class:`~imaspy.ids_primitive.IDSNumericArray` is the type specialization for + - :py:class:`~imas.ids_primitive.IDSNumericArray` is the type specialization for any numeric type with at least one dimension. It can be used as if it is a :external:py:class:`numpy.ndarray`. -- :py:mod:`imaspy.ids_struct_array` contains the - :py:class:`~imaspy.ids_struct_array.IDSStructArray` class, which models Arrays of +- :py:mod:`imas.ids_struct_array` contains the + :py:class:`~imas.ids_struct_array.IDSStructArray` class, which models Arrays of Structures. It also contains some :ref:`dev lazy loading` logic. -- :py:mod:`imaspy.ids_structure` contains the - :py:class:`~imaspy.ids_structure.IDSStructure` class, which models Structures. It +- :py:mod:`imas.ids_structure` contains the + :py:class:`~imas.ids_structure.IDSStructure` class, which models Structures. It contains the :ref:`lazy instantiation` logic and some of the :ref:`dev lazy loading` logic. 
-- :py:mod:`imaspy.ids_toplevel` contains the - :py:class:`~imaspy.ids_toplevel.IDSToplevel` class, which is a subclass of - :py:class:`~imaspy.ids_structure.IDSStructure` and models toplevel IDSs. +- :py:mod:`imas.ids_toplevel` contains the + :py:class:`~imas.ids_toplevel.IDSToplevel` class, which is a subclass of + :py:class:`~imas.ids_structure.IDSStructure` and models toplevel IDSs. It implements some API methods that are only available on IDSs, such as ``validate`` and ``(de)serialize``, and overwrites implementations of some @@ -197,9 +197,9 @@ have any IDS child nodes instantiated: .. code-block:: python - >>> import imaspy + >>> import imas >>> # Create an empty IDS - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> # Show which elements are already created: >>> list(cp.__dict__) ['_lazy', '_children', '_parent', 'metadata', '__doc__', '_lazy_context'] @@ -221,12 +221,12 @@ Lazy loading '''''''''''' :ref:`lazy loading` defers reading the data from the backend in a -:py:meth:`~imaspy.db_entry.DBEntry.get` or :py:meth:`~imaspy.db_entry.DBEntry.get_slice` +:py:meth:`~imas.db_entry.DBEntry.get` or :py:meth:`~imas.db_entry.DBEntry.get_slice` until the data is requested. This is handled in two places: 1. ``IDSStructure.__getattr__`` implements the lazy loading alongside the lazy instantiation. When a new element is created by lazy instantiation, it will call - ``imaspy.db_entry_helpers._get_child`` to lazy load this element: + ``imas.db_entry_helpers._get_child`` to lazy load this element: - When the element is a data node (``IDSPrimitive`` subclass), the data for this element is loaded from the backend. @@ -244,18 +244,18 @@ until the data is requested. This is handled in two places: Creating and loading IDSs ------------------------- -- :py:mod:`imaspy.db_entry` contains the :py:class:`~imaspy.db_entry.DBEntry` class. +- :py:mod:`imas.db_entry` contains the :py:class:`~imas.db_entry.DBEntry` class. 
This class represents an on-disk Data Entry and can be used to store - (:py:meth:`~imaspy.db_entry.DBEntry.put`, - :py:meth:`~imaspy.db_entry.DBEntry.put_slice`) or load - (:py:meth:`~imaspy.db_entry.DBEntry.get`, - :py:meth:`~imaspy.db_entry.DBEntry.get_slice`) IDSs. The actual implementation of + (:py:meth:`~imas.db_entry.DBEntry.put`, + :py:meth:`~imas.db_entry.DBEntry.put_slice`) or load + (:py:meth:`~imas.db_entry.DBEntry.get`, + :py:meth:`~imas.db_entry.DBEntry.get_slice`) IDSs. The actual implementation of data storage and retrieval is handled by the backends in the - ``imaspy.backends.*`` subpackages. + ``imas.backends.*`` subpackages. - :py:class:`~imaspy.db_entry.DBEntry` handles the autoconversion between IDS versions + :py:class:`~imas.db_entry.DBEntry` handles the autoconversion between IDS versions as described in :ref:`Automatic conversion between DD versions`. -- :py:mod:`imaspy.ids_factory` contains the :py:class:`~imaspy.ids_factory.IDSFactory` +- :py:mod:`imas.ids_factory` contains the :py:class:`~imas.ids_factory.IDSFactory` class. This class is responsible for creating IDS toplevels from a given Data Dictionary definition, and can list all IDS names inside a DD definition. @@ -263,20 +263,19 @@ Creating and loading IDSs Access Layer interfaces ----------------------- -- :py:mod:`imaspy.backends.imas_core.al_context` provides an object-oriented interface when working with +- :py:mod:`imas.backends.imas_core.al_context` provides an object-oriented interface when working with Lowlevel contexts. The contexts returned by the lowlevel are an integer identifier and need to be provided to several LL methods (e.g. ``read_data``), some of which may create new contexts. - The :py:class:`~imaspy.backends.imas_core.al_context.ALContext` class implements this object oriented + The :py:class:`~imas.backends.imas_core.al_context.ALContext` class implements this object oriented interface. 
- A second class (:py:class:`~imaspy.backends.imas_core.al_context.LazyALContext`) implements the same + A second class (:py:class:`~imas.backends.imas_core.al_context.LazyALContext`) implements the same interface, but is used when :ref:`dev lazy loading`. -- :py:mod:`imaspy.ids_defs` provides access to Access Layer constants (mostly defined - in ``imas.imasdef``). -- :py:mod:`imaspy.backends.imas_core.imas_interface` provides a version-independent interface to the - Access Layer through :py:class:`~imaspy.backends.imas_core.imas_interface.LowlevelInterface`. It +- :py:mod:`imas.ids_defs` provides access to Access Layer constants +- :py:mod:`imas.backends.imas_core.imas_interface` provides a version-independent interface to the + Access Layer through :py:class:`~imas.backends.imas_core.imas_interface.LowlevelInterface`. It defines all known methods of the Access Layer and defers to the correct implementation if it is available in the loaded AL version (and raises a descriptive exception if the function is not available). @@ -285,40 +284,35 @@ Access Layer interfaces MDSplus support --------------- -- :py:mod:`imaspy.backends.imas_core.mdsplus_model` is responsible for creating MDSplus `models`. These +- :py:mod:`imas.backends.imas_core.mdsplus_model` is responsible for creating MDSplus `models`. These models are specific to a DD version and are required when using the MDSplus backend for creating new Data Entries. - .. seealso:: :ref:`MDSplus in IMASPy` + .. seealso:: :ref:`MDSplus in imas-python` Versioning ---------- -IMASPy uses `versioneer `_ for -versioning. An IMASPy release has a corresponding tag (which sets the version), e.g. -`this is the tag -`_ for -version ``0.8.0``. Development builds are versioned based on the ``git describe`` of the -repository. - -The ``imaspy._version`` module is generated by ``versioneer`` and implements this logic -for editable installs. 
This module is replaced by ``versioneer`` when building python -packages (this is handled in ``setup.py``). +imas-python uses `setuptools-scm `_ for +versioning. An imas-python release has a corresponding tag (which sets the version). +The ``imas._version`` module is generated by ``setuptools-scm`` and implements this logic +for editable installs. This module is generated by ``setuptools-scm`` when building python +packages. Conversion between Data Dictionary versions ------------------------------------------- -:py:mod:`imaspy.ids_convert` contains logic for converting an IDS between DD versions. +:py:mod:`imas.ids_convert` contains logic for converting an IDS between DD versions. -The :py:class:`~imaspy.ids_convert.DDVersionMap` class creates and contains mappings for +The :py:class:`~imas.ids_convert.DDVersionMap` class creates and contains mappings for an IDS between two Data Dictionary versions. It creates two mappings: one to be used when converting from the newer version of the two to the older version (``new_to_old``) and a map for the reverse (``old_to_new``). These mappings are of type -:py:class:`~imaspy.ids_convert.NBCPathMap`. See its API documentation for more details. +:py:class:`~imas.ids_convert.NBCPathMap`. See its API documentation for more details. -:py:func:`~imaspy.ids_convert.convert_ids` is the main API method for converting IDSs +:py:func:`~imas.ids_convert.convert_ids` is the main API method for converting IDSs between versions. It works as follows: - It builds a ``DDVersionMap`` between the two DD versions version and selects the @@ -326,11 +320,11 @@ between versions. It works as follows: - If needed, it creates a target IDS of the destination DD version. - It then uses the ``NBCPathMap`` to convert data and store it in the target IDS. -:py:class:`~imaspy.db_entry.DBEntry` can also handle automatic DD version conversion. It +:py:class:`~imas.db_entry.DBEntry` can also handle automatic DD version conversion. 
It uses the same ``DDVersionMap`` and ``NBCPathMap`` as -:py:func:`~imaspy.ids_convert.convert_ids`. When reading data from the backends, the +:py:func:`~imas.ids_convert.convert_ids`. When reading data from the backends, the ``NBCPathMap`` is used to translate between the old and the new DD version. See the -implementation in :py:mod:`imaspy.backends.imas_core.db_entry_helpers`. +implementation in :py:mod:`imas.backends.imas_core.db_entry_helpers`. Miscelleneous @@ -339,13 +333,13 @@ Miscelleneous The following is a list of miscelleneous modules, which don't belong to any of the other categories on this page. -- :py:mod:`imaspy.exception` contains all Exception classes that IMASPy may raise. -- :py:mod:`imaspy.setup_logging` initializes a logging handler for IMASPy. -- :py:mod:`imaspy.training` contains helper methods for making training data +- :py:mod:`imas.exception` contains all Exception classes that imas-python may raise. +- :py:mod:`imas.setup_logging` initializes a logging handler for imas-python. +- :py:mod:`imas.training` contains helper methods for making training data available. -- :py:mod:`imaspy.util` contains useful utility methods. It is imported automatically. +- :py:mod:`imas.util` contains useful utility methods. It is imported automatically. All methods requiring third party libraries (``rich`` and ``scipy``) are implemented - in ``imaspy._util``. This avoids importing these libraries immediately when a - user imports ``imaspy`` (which can take a couple hundred milliseconds). Instead, + in ``imas._util``. This avoids importing these libraries immediately when a + user imports ``imas`` (which can take a couple hundred milliseconds). Instead, this module is only loaded when a user needs this functionality. 
diff --git a/docs/source/imaspy_structure.png b/docs/source/imas_structure.png similarity index 100% rename from docs/source/imaspy_structure.png rename to docs/source/imas_structure.png diff --git a/docs/source/index.rst b/docs/source/index.rst index c5a3f24c..20a5d80a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -2,12 +2,12 @@ Master "index". This will be converted to a landing index.html by sphinx. We define TOC here, but it'll be put in the sidebar by the theme -============= -IMASPy Manual -============= +================== +imas-python manual +================== -IMASPy is a pure-python library to handle arbitrarily nested -data structures. IMASPy is designed for, but not necessarily bound to, +imas-python is a pure-python library to handle arbitrarily nested +data structures. imas-python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -20,9 +20,9 @@ It provides: - Checking of correctness at assign time, instead of at database write time - Dynamically created in-memory pre-filled data trees from DD XML specifications -The README is best read on :src:`#imaspy`. +The README is best read on :src:`#imas`. -Read what's new in the current version of IMASPy in our :ref:`changelog`! +Read what's new in the current version of imas-python in our :ref:`changelog`! Manual @@ -48,7 +48,7 @@ Manual changelog .. toctree:: - :caption: IMASPy training courses + :caption: imas-python training courses :maxdepth: 1 courses/basic_user_training @@ -64,14 +64,14 @@ Manual .. 
toctree:: - :caption: IMASPy development + :caption: imas-python development :maxdepth: 1 - imaspy_architecture + imas_architecture code_style ci_config benchmarking - release_imaspy + release_imas LICENSE diff --git a/docs/source/installing.rst b/docs/source/installing.rst index 3f596aaa..a454d0aa 100644 --- a/docs/source/installing.rst +++ b/docs/source/installing.rst @@ -1,46 +1,33 @@ -.. _`Installing IMASPy`: +.. _`Installing imas-python`: -Installing IMASPy -================= +Installing imas-python +====================== -IMASPy is a pure Python package. For full functionality of the package you need -an installation of `the IMAS Access Layer `_. See -:ref:`IMASPy 5 minute introduction` for an overview of functionality which does -(not) require the IMAS Access Layer available. +imas-python is a pure Python package. For full functionality of the package you need +an installation of `the IMAS Core library `_. See +:ref:`imas-python 5 minute introduction` for an overview of functionality which does +(not) require the IMAS Core library available. - -IMASPy modules on the ITER cluster and EuroFusion gateway ---------------------------------------------------------- - -There is a `module` available on the ITER and Eurofusion Gateway clusters, so -you can run: +To get started, you can install it from `pypi.org `_: .. code-block:: bash - module load IMASPy - -Additionally, if you wish to use the MDSPlus backend, you should load: - -.. code-block:: bash + pip install imas-python - module load MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11 -If you're using a different cluster, please contact your system administrator to see -if IMASPy is available (or can be made available) on the system. +Local installation from sources +------------------------------- - -Local installation ------------------- - -We recommend using a :external:py:mod:`venv`. Then, clone the IMASPy repository +We recommend using a :external:py:mod:`venv`. 
Then, clone the imas-python repository and run `pip install`: .. code-block:: bash python3 -m venv ./venv . venv/bin/activate - git clone ssh://git@git.iter.org/imas/imaspy.git - cd imaspy + + git clone ssh://git@github.com/iterorganization/imas-python.git + cd imas-python pip install --upgrade pip pip install --upgrade wheel setuptools pip install . @@ -61,32 +48,22 @@ Test your installation by trying .. code-block:: bash cd ~ - python -c "import imaspy; print(imaspy.__version__)" + python -c "import imas; print(imas.__version__)" -This is how to run the IMASPy test suite: +This is how to run the imas-python test suite: .. code-block:: bash - # inside the IMASPy git repository - pytest imaspy --mini + # inside the imas-python git repository + pytest imas --mini # run with a specific backend - pytest imaspy --ascii --mini + pytest imas --ascii --mini -And to build the IMASPy documentation, execute: +And to build the imas-python documentation, execute: .. code-block:: bash make -C docs html -Installation without ITER access -------------------------------- - -The installation script tries to access the `ITER IMAS Core Data Dictionary -repository `_ -to fetch the latest versions. If you do not have git+ssh access there, you can -try to find this repository elsewhere, and do a ``git fetch --tags``. - -Alternatively you could try to obtain an ``IDSDef.zip`` and place it in -``~/.config/imaspy/``. diff --git a/docs/source/intro.rst b/docs/source/intro.rst index a32799e6..30c75e1e 100644 --- a/docs/source/intro.rst +++ b/docs/source/intro.rst @@ -1,7 +1,7 @@ -.. _`IMASPy 5 minute introduction`: +.. _`imas-python 5 minute introduction`: -IMASPy 5 minute introduction ----------------------------- +imas-python 5 minute introduction +--------------------------------- .. contents:: Contents :local: @@ -11,39 +11,39 @@ IMASPy 5 minute introduction Verify your IMAS installation ''''''''''''''''''''''''''''' -Before continuing, verify that your imaspy install is working. 
Check the -:ref:`Installing IMASPy` page for installation instructions if below fails for -you. Start python and import imaspy. Note that the version in below output may +Before continuing, verify that your imas install is working. Check the +:ref:`Installing imas-python` page for installation instructions if below fails for +you. Start python and import imas. Note that the version in below output may be outdated. .. code-block:: python - >>> import imaspy - >>> print(imaspy.__version__) + >>> import imas + >>> print(imas.__version__) 1.0.0 .. note:: - If you have an IMASPy install without the IMAS Access Layer, importing - IMASPy will display an error message. You can still use IMASPy, but not all + If you have an imas-python install without the IMAS Access Layer, importing + imas-python will display an error message. You can still use imas-python, but not all functionalities are available. Create and use an IDS ''''''''''''''''''''' -To create an IDS, you must first make an :py:class:`~imaspy.ids_factory.IDSFactory` +To create an IDS, you must first make an :py:class:`~imas.ids_factory.IDSFactory` object. The IDS factory is necessary for specifying which version of the IMAS Data -Dictionary you want to use. If you don't specify anything, IMASPy uses the same Data +Dictionary you want to use. If you don't specify anything, imas-python uses the same Data Dictionary version as the loaded IMAS environment, or the latest available version. See :ref:`Using multiple DD versions in the same environment` for more information on different Data Dictionary versions. .. code-block:: python - >>> import imaspy + >>> import imas >>> import numpy as np - >>> ids_factory = imaspy.IDSFactory() + >>> ids_factory = imas.IDSFactory() 13:26:47 [INFO] Parsing data dictionary version 3.38.1 @dd_zip.py:127 >>> # Create an empty core_profiles IDS >>> core_profiles = ids_factory.core_profiles() @@ -52,8 +52,8 @@ We can now use this ``core_profiles`` IDS and assign some data to it: .. 
code-block:: python - >>> core_profiles.ids_properties.comment = "Testing IMASPy" - >>> core_profiles.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + >>> core_profiles.ids_properties.comment = "Testing imas-python" + >>> core_profiles.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> # array quantities are automatically converted to the appropriate numpy arrays >>> core_profiles.time = [1, 2, 3] >>> # the python list of ints is converted to a 1D array of floats @@ -68,7 +68,7 @@ We can now use this ``core_profiles`` IDS and assign some data to it: >>> core_profiles.profiles_1d[0].grid.rho_tor_norm = [0, 0.5, 1.0] >>> core_profiles.profiles_1d[0].j_tor = [0, 0, 0] -As you can see in the example above, IMASPy automatically checks the data you try to +As you can see in the example above, imas-python automatically checks the data you try to assign to an IDS with the data type specified in the Data Dictionary. When possible, your data is automatically converted to the expected type. You will get an error message if this is not possible: @@ -90,7 +90,7 @@ Store an IDS to disk .. note:: - This functionality requires the IMAS Access Layer. - - This API will change when IMASPy is moving to Access Layer 5 (expected Q2 + - This API will change when imas-python is moving to Access Layer 5 (expected Q2 2023). To store an IDS to disk, we need to indicate the following information to the @@ -102,18 +102,18 @@ IMAS Access Layer. Please check the `IMAS Access Layer documentation - ``pulse`` - ``run`` -In IMASPy you do this as follows: +In imas-python you do this as follows: .. 
code-block:: python >>> # Create a new IMAS data entry for storing the core_profiles IDS we created earlier >>> # Here we specify the backend, database, pulse and run - >>> dbentry = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "TEST", 10, 2) + >>> dbentry = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2) >>> dbentry.create() >>> # now store the core_profiles IDS we just populated >>> dbentry.put(core_profiles) -.. image:: imaspy_structure.png +.. image:: imas_structure.png Load an IDS from disk @@ -122,7 +122,7 @@ Load an IDS from disk .. note:: - This functionality requires the IMAS Access Layer. - - This API will change when IMASPy is moving to Access Layer 5 (expected Q2 + - This API will change when imas-python is moving to Access Layer 5 (expected Q2 2023). To load an IDS from disk, you need to specify the same information as @@ -132,8 +132,8 @@ can use ``.get()`` to load IDS data from disk: .. code-block:: python >>> # Now load the core_profiles IDS back from disk - >>> dbentry2 = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "TEST", 10, 2) + >>> dbentry2 = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2) >>> dbentry2.open() >>> core_profiles2 = dbentry2.get("core_profiles") >>> print(core_profiles2.ids_properties.comment.value) - Testing IMASPy + Testing imas-python diff --git a/docs/source/lazy_loading.rst b/docs/source/lazy_loading.rst index cfea365e..a4317d5d 100644 --- a/docs/source/lazy_loading.rst +++ b/docs/source/lazy_loading.rst @@ -4,12 +4,12 @@ Lazy loading ============ When reading data from a data entry (using :meth:`DBEntry.get -`, or :meth:`DBEntry.get_slice -`), by default all data is read immediately from the +`, or :meth:`DBEntry.get_slice +`), by default all data is read immediately from the lowlevel Access Layer backend. This may take a long time to complete if the data entry has a lot of data stored for the requested IDS. 
-Instead of reading data immediately, IMASPy can also `lazy load` the data when you need +Instead of reading data immediately, imas-python can also `lazy load` the data when you need it. This will speed up your program in cases where you are interested in a subset of all the data stored in an IDS. @@ -18,12 +18,12 @@ Enable lazy loading of data --------------------------- You can enable lazy loading of data by supplying the keyword argument :code:`lazy=True` -to :meth:`DBEntry.get `, or :meth:`DBEntry.get_slice -`. The returned IDS +to :meth:`DBEntry.get `, or :meth:`DBEntry.get_slice +`. The returned IDS object will fetch the data from the backend at the moment that you want to access it. See below example: -.. literalinclude:: courses/basic/imaspy_snippets/plot_core_profiles_te.py +.. literalinclude:: courses/basic/imas_snippets/plot_core_profiles_te.py :caption: Example with lazy loading of data In this example, using lazy loading with the MDSPLUS backend is about 12 times @@ -39,39 +39,39 @@ Lazy loading of data may speed up your programs, but also comes with some limita 1. Some functionality is not implemented or works differently for lazy-loaded IDSs: - Iterating over non-empty nodes works differently, see API documentation: - :py:meth:`imaspy.ids_structure.IDSStructure.iter_nonempty_`. - - :py:meth:`~imaspy.ids_structure.IDSStructure.has_value` is not implemented for + :py:meth:`imas.ids_structure.IDSStructure.iter_nonempty_`. + - :py:meth:`~imas.ids_structure.IDSStructure.has_value` is not implemented for lazy-loaded structure elements. - - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate` will only validate loaded + - :py:meth:`~imas.ids_toplevel.IDSToplevel.validate` will only validate loaded data. Additional data might be loaded from the backend to validate coordinate sizes. - - :py:meth:`imaspy.util.print_tree` will only print data that is loaded when - :py:param:`~imaspy.util.print_tree.hide_empty_nodes` is ``True``. 
- - :py:meth:`imaspy.util.visit_children`: + - :py:meth:`imas.util.print_tree` will only print data that is loaded when + :py:param:`~imas.util.print_tree.hide_empty_nodes` is ``True``. + - :py:meth:`imas.util.visit_children`: - - When :py:param:`~imaspy.util.visit_children.visit_empty` is ``False`` + - When :py:param:`~imas.util.visit_children.visit_empty` is ``False`` (default), this method uses - :py:meth:`~imaspy.ids_structure.IDSStructure.iter_nonempty_`. This raises an + :py:meth:`~imas.ids_structure.IDSStructure.iter_nonempty_`. This raises an error for lazy-loaded IDSs, unless you set - :py:param:`~imaspy.util.visit_children.accept_lazy` to ``True``. - - When :py:param:`~imaspy.util.visit_children.visit_empty` is ``True``, this + :py:param:`~imas.util.visit_children.accept_lazy` to ``True``. + - When :py:param:`~imas.util.visit_children.visit_empty` is ``True``, this will iteratively load `all` data from the backend. This is effectively a full, but less efficient, ``get()``\ /\ ``get_slice()``. It will be faster if you don't use lazy loading in this case. - - IDS conversion through :py:meth:`imaspy.convert_ids - ` is not implemented for lazy loaded IDSs. Note + - IDS conversion through :py:meth:`imas.convert_ids + ` is not implemented for lazy loaded IDSs. Note that :ref:`Automatic conversion between DD versions` also applies when lazy loading. - Lazy loaded IDSs are read-only, setting or changing values, resizing arrays of structures, etc. is not allowed. - - You cannot :py:meth:`~imaspy.db_entry.DBEntry.put`, - :py:meth:`~imaspy.db_entry.DBEntry.put_slice` or - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.serialize` lazy-loaded IDSs. + - You cannot :py:meth:`~imas.db_entry.DBEntry.put`, + :py:meth:`~imas.db_entry.DBEntry.put_slice` or + :py:meth:`~imas.ids_toplevel.IDSToplevel.serialize` lazy-loaded IDSs. - Copying lazy-loaded IDSs (through :external:py:func:`copy.deepcopy`) is not implemented. -2. 
IMASPy **assumes** that the underlying data entry is not modified. +2. imas-python **assumes** that the underlying data entry is not modified. When you (or another user) overwrite or add data to the same data entry, you may end up with a mix of old and new data in the lazy loaded IDS. @@ -89,5 +89,5 @@ Lazy loading of data may speed up your programs, but also comes with some limita 4. Lazy loading has more overhead for reading data from the lowlevel: it is therefore more efficient to do a full :code:`get()` or :code:`get_slice()` when you intend to use most of the data stored in an IDS. -5. When using IMASPy with remote data access (i.e. the UDA backend), a full +5. When using imas-python with remote data access (i.e. the UDA backend), a full :code:`get()` or :code:`get_slice()` is more efficient than lazy loading. diff --git a/docs/source/mdsplus.rst b/docs/source/mdsplus.rst index b097df43..1ff6e74e 100644 --- a/docs/source/mdsplus.rst +++ b/docs/source/mdsplus.rst @@ -1,15 +1,15 @@ -.. _`MDSplus in IMASPy`: +.. _`MDSplus in imas-python`: -MDSplus in IMASPy -================= +MDSplus in imas-python +====================== `MDSplus `_ is a set of software tools for data acquisition and storage and a methodology for management of complex -scientific data. IMASPy uses the IMAS LowLevel interface to interact +scientific data. imas-python uses the IMAS LowLevel interface to interact with MDSplus data. The model files required to read IMAS IDS-structured data are generated on demand, whenever a specific DD version is used by the user. As this generation might take a while, MDSplus models are -cached to disk, generally in ``$HOME/.cache/imaspy``. As multiple +cached to disk, generally in ``$HOME/.cache/imas``. As multiple processes can write to this location, especially during testing, special care is taken to avoid write collisions. 
``$MDSPLUS_MODEL_TIMEOUT`` can be used to specify the amount of seconds diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index fcbd87d9..32545c35 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -5,21 +5,21 @@ IDS metadata Besides the data structure, the IMAS Data Dictionary also defines metadata associated with elements in the IDS, such as coordinate information, units, etc. -IMASPy provides the :py:class:`~imaspy.ids_metadata.IDSMetadata` API for +imas-python provides the :py:class:`~imas.ids_metadata.IDSMetadata` API for interacting with this metadata. On this page you find several examples for querying and using the metadata of IDS elements. .. seealso:: - IMASPy advanced training: :ref:`Using metadata` + imas-python advanced training: :ref:`Using metadata` Overview of available metadata ------------------------------ An overview of available metadata is given in the API documentation for -:py:class:`~imaspy.ids_metadata.IDSMetadata`. +:py:class:`~imas.ids_metadata.IDSMetadata`. The documented attributes are always available, but additional metadata from the data dictionary may be available as well. For example, the data dictionary indicates a ``lifecycle_last_change`` on all IDS @@ -28,8 +28,8 @@ metadata documentation, but you can still access it. See the following code samp .. code-block:: pycon - >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.metadata.lifecycle_last_change '3.39.0' @@ -60,7 +60,7 @@ looked up. See below example. .. 
code-block:: python :caption: Example getting coordinate values belonging to a 1D quantity - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.profiles_1d.resize(1) >>> profile = core_profiles.profiles_1d[0] >>> profile.grid.rho_tor_norm = [0, 0.15, 0.3, 0.45, 0.6] @@ -68,14 +68,14 @@ looked up. See below example. >>> profile.electrons.temperature.coordinates[0] IDSNumericArray("/core_profiles/profiles_1d/1/grid/rho_tor_norm", array([0. , 0.15, 0.3 , 0.45, 0.6 ])) -When a coordinate is just an index, IMASPy generates a +When a coordinate is just an index, imas-python generates a :external:py:func:`numpy.arange` with the same length as the data. See below example. .. code-block:: python :caption: Example getting index coordinate values belonging to an array of structures - >>> pf_active = imaspy.IDSFactory().pf_active() + >>> pf_active = imas.IDSFactory().pf_active() >>> pf_active.coil.resize(10) >>> # Coordinate1 of coil is an index 1...N >>> pf_active.coil.coordinates[0] @@ -84,18 +84,18 @@ example. .. rubric:: Time coordinates Time coordinates are a special case: the coordinates depend on whether the IDS -is in homogeneous time mode or not. IMASPy handles this transparently. +is in homogeneous time mode or not. imas-python handles this transparently. .. code-block:: python :caption: Example getting time coordinate values - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> # profiles_1d is a time-dependent array of structures: >>> core_profiles.profiles_1d.coordinates[0] [...] ValueError: Invalid IDS time mode: ids_properties/homogeneous_time is , was expecting 0 or 1. >>> core_profiles.ids_properties.homogeneous_time = \\ - ... imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + ... 
imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> # In homogeneous time mode, the root /time array is used >>> core_profiles.time = [0, 1] >>> core_profiles.profiles_1d.resize(2) @@ -103,7 +103,7 @@ is in homogeneous time mode or not. IMASPy handles this transparently. IDSNumericArray("/core_profiles/time", array([0., 1.])) >>> # But in heterogeneous time mode, profiles_1d/time is used instead >>> core_profiles.ids_properties.homogeneous_time = \\ - ... imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS + ... imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS >>> core_profiles.profiles_1d.coordinates[0] array([-9.e+40, -9.e+40]) @@ -116,36 +116,36 @@ used as a coordinate. For example, the ``distribution(i1)/profiles_2d(itime)/grid/r OR distribution(i1)/profiles_2d(itime)/grid/rho_tor_norm``. This means that either ``r`` or ``rho_tor_norm`` can be used as coordinate. When requesting such a -coordinate from IMASPy, four things may happen: +coordinate from imas-python, four things may happen: 1. When ``r`` is empty and ``rho_tor_norm`` not, ``coordinates[0]`` will return ``rho_tor_norm``. 2. When ``rho_tor_norm`` is empty and ``r`` not, ``coordinates[0]`` will return ``r``. -3. When both ``r`` and ``rho_tor_norm`` are not empty, IMASPy raises an error +3. When both ``r`` and ``rho_tor_norm`` are not empty, imas-python raises an error because it cannot determine which of the two coordinates should be used. -4. Similarly, an error is raised by IMASPy when neither ``r`` nor +4. Similarly, an error is raised by imas-python when neither ``r`` nor ``rho_tor_norm`` are set. .. seealso:: - API documentation for :py:class:`~imaspy.ids_coordinates.IDSCoordinates` + API documentation for :py:class:`~imas.ids_coordinates.IDSCoordinates` Query coordinate information '''''''''''''''''''''''''''' -In IMASPy you can query coordinate information in two ways: +In imas-python you can query coordinate information in two ways: 1. 
Directly query the coordinate attribute on the metadata: :code:`.metadata.coordinate2` gives you the coordinate information for the second dimension of the quantity. -2. Use the :py:attr:`~imaspy.ids_metadata.IDSMetadata.coordinates` attribute: +2. Use the :py:attr:`~imas.ids_metadata.IDSMetadata.coordinates` attribute: :code:`.metadata.coordinates` is a tuple containing all coordinate information for the quantity. The coordinate information from the Data Dictionary is parsed and stored in an -:py:class:`~imaspy.ids_coordinates.IDSCoordinate`. The Data Dictionary has +:py:class:`~imas.ids_coordinates.IDSCoordinate`. The Data Dictionary has several types of coordinate information: 1. When the coordinate is an index, the Data Dictionary indicates this via @@ -163,7 +163,7 @@ several types of coordinate information: .. code-block:: python :caption: Examples querying coordinate information - >>> pf_active = imaspy.IDSFactory().pf_active() + >>> pf_active = imas.IDSFactory().pf_active() >>> # coordinate1 of pf_active/coil is an index (the number of the coil) >>> pf_active.coil.metadata.coordinate1 IDSCoordinate('1...N') @@ -175,7 +175,7 @@ several types of coordinate information: .. seealso:: - API documentation for :py:class:`~imaspy.ids_coordinates.IDSCoordinate`. + API documentation for :py:class:`~imas.ids_coordinates.IDSCoordinate`. Query alternative coordinates @@ -183,7 +183,7 @@ Query alternative coordinates Starting in Data Dictionary 4.0, a coordinate quantity may indicate alternatives for itself. These alternatives are stored in the metadata attribute -:py:attr:`~imaspy.ids_metadata.IDSMetadata.alternative_coordinates`. +:py:attr:`~imas.ids_metadata.IDSMetadata.alternative_coordinates`. For example, most quantities in ``profiles_1d`` of the ``core_profiles`` IDS have ``profiles_1d/grid/rho_tor_norm`` as coordinate. However, there are alternatives @@ -193,9 +193,9 @@ the metadata of ``rho_tor_norm``: .. 
code-block:: python :caption: Showing alternative coordinates in Data Dictionary version 4.0.0 - >>> import imaspy + >>> import imas >>> import rich - >>> dd4 = imaspy.IDSFactory("4.0.0") + >>> dd4 = imas.IDSFactory("4.0.0") >>> core_profiles = dd4.core_profiles() >>> rich.print(cp.profiles_1d[0].grid.rho_tor_norm.metadata.alternative_coordinates) ( diff --git a/docs/source/multi-dd.rst b/docs/source/multi-dd.rst index 19d51b41..372aaae0 100644 --- a/docs/source/multi-dd.rst +++ b/docs/source/multi-dd.rst @@ -4,26 +4,26 @@ Using multiple DD versions in the same environment ================================================== Whereas the default IMAS High Level Interface is built for a single Data Dictionary -version, IMASPy can transparently handle multiple DD versions. +version, imas-python can transparently handle multiple DD versions. -By default, IMASPy uses the same Data Dictionary version as the loaded IMAS environment +By default, imas-python uses the same Data Dictionary version as the loaded IMAS environment is using, as specified by the environment variable ``IMAS_VERSION``. If no IMAS environment is loaded, the last available DD version is used. You can also explicitly specify which IMAS version you want to use when constructing a -:py:class:`~imaspy.db_entry.DBEntry` or :py:class:`~imaspy.ids_factory.IDSFactory`. For +:py:class:`~imas.db_entry.DBEntry` or :py:class:`~imas.ids_factory.IDSFactory`. For example: .. code-block:: python :caption: Using non-default IMAS versions. 
- import imaspy + import imas - factory_default = imaspy.IDSFactory() # Use default DD version - factory_3_32_0 = imaspy.IDSFactory("3.32.0") # Use DD version 3.32.0 + factory_default = imas.IDSFactory() # Use default DD version + factory_3_32_0 = imas.IDSFactory("3.32.0") # Use DD version 3.32.0 # Will write IDSs to the backend in DD version 3.32.0 - dbentry = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "TEST", 10, 2, version="3.32.0") + dbentry = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2, version="3.32.0") dbentry.create() .. seealso:: :ref:`multi-dd training` @@ -34,13 +34,13 @@ example: Conversion of IDSs between DD versions -------------------------------------- -IMASPy can convert IDSs between different versions of the data dictionary. This uses the +imas-python can convert IDSs between different versions of the data dictionary. This uses the "non-backwards compatible changes" metadata from the DD definitions. There are two conversion modes: 1. Automatic conversion: this is handled when reading or writing data - (:py:meth:`~imaspy.db_entry.DBEntry.get`/:py:meth:`~imaspy.db_entry.DBEntry.get_slice`, - :py:meth:`~imaspy.db_entry.DBEntry.put`/:py:meth:`~imaspy.db_entry.DBEntry.put_slice`). + (:py:meth:`~imas.db_entry.DBEntry.get`/:py:meth:`~imas.db_entry.DBEntry.get_slice`, + :py:meth:`~imas.db_entry.DBEntry.put`/:py:meth:`~imas.db_entry.DBEntry.put_slice`). The DBEntry class automatically converts IDSs to the requested version: @@ -57,7 +57,7 @@ two conversion modes: are silently ignored. 2. Explicit conversion: this is achieved with a call to - :py:func:`imaspy.convert_ids `. + :py:func:`imas.convert_ids `. Automatic conversion is faster when reading data (up to a factor 2, depending on the backend and the stored data), but it doesn't support all conversion logic @@ -75,8 +75,8 @@ the backend and the stored data), but it doesn't support all conversion logic be more efficient to convert the data to your DD version, store it and then use it. 
This avoids conversion every time you read the data. - Converting an entire Data Entry can also be done with the IMASPy command - line interface. See :ref:`IMASPy Command Line tool`. + Converting an entire Data Entry can also be done with the imas-python command + line interface. See :ref:`imas-python Command Line tool`. Explicit conversion @@ -85,28 +85,28 @@ Explicit conversion .. code-block:: python :caption: Explicitly convert data when reading from disk - import imaspy + import imas - entry = imaspy.DBEntry("", "r") + entry = imas.DBEntry("", "r") # Disable automatic conversion when reading the IDS with autoconvert=False ids = entry.get("", autoconvert=False) # Explicitly convert the IDS to the target version - ids = imaspy.convert_ids(ids, "") + ids = imas.convert_ids(ids, "") .. code-block:: python :caption: Convert an IDS to a different DD version - import imaspy + import imas # Create a pulse_schedule IDS in version 3.23.0 - ps = imaspy.IDSFactory("3.25.0").new("pulse_schedule") + ps = imas.IDSFactory("3.25.0").new("pulse_schedule") ps.ec.antenna.resize(1) ps.ec.antenna[0].name = "IDS conversion test" # Convert the IDS to version 3.30.0 - ps330 = imaspy.convert_ids(ps, "3.30.0") + ps330 = imas.convert_ids(ps, "3.30.0") # ec.antenna was renamed to ec.launcher between 3.23.0 and 3.30.0 print(len(ps330.ec.launcher)) # 1 print(ps330.ec.launcher[0].name.value) # IDS conversion test @@ -114,7 +114,7 @@ Explicit conversion .. note:: Not all data may be converted. For example, when an IDS node is removed between DD - versions, the corresponding data is not copied. IMASPy provides logging to indicate + versions, the corresponding data is not copied. imas-python provides logging to indicate when this happens. @@ -181,16 +181,16 @@ explicit conversion mechanisms. 
Background information ---------------------- -Since IMASPy needs to have access to multiple DD versions it was chosen to +Since imas-python needs to have access to multiple DD versions it was chosen to bundle these with the code at build-time, in setup.py. If a git clone of the Data Dictionary succeeds, the setup tools automatically download saxon and generate ``IDSDef.xml`` for each of the tagged versions in the DD git repository. These are then gathered into ``IDSDef.zip``, which is -distributed inside the IMASPy package. +distributed inside the imas-python package. To update the set of data dictionaries new versions can be added to the zipfile. A reinstall of the package will ensure that all available versions are included -in IMASPy. Additionally an explicit path to an XML file can be specified, which +in imas-python. Additionally an explicit path to an XML file can be specified, which is useful for development. Automated tests have been provided that check the loading of all of the DD @@ -203,14 +203,14 @@ Extending the DD set Use the command ``python setup.py build_DD`` to build a new ``IDSDef.zip``. This fetches all tags from the data dictionary git repository and builds the ``IDSDef.zip``. -IMASPy searches for an ``IDSDef.zip`` in the following locations: +imas-python searches for an ``IDSDef.zip`` in the following locations: -1. The environment variable ``$IMASPY_DDZIP`` (path to a zip file) +1. The environment variable ``$IMAS_DDZIP`` (path to a zip file) 2. The file ``./IDSDef.zip`` in the current working directory -3. In the local configuration folder: ``~/.config/imaspy/IDSDef.zip``, or - ``$XDG_CONFIG_DIR/imaspy/IDSDef.zip`` (if the environment variable +3. In the local configuration folder: ``~/.config/imas/IDSDef.zip``, or + ``$XDG_CONFIG_DIR/imas/IDSDef.zip`` (if the environment variable ``$XDG_CONFIG_DIR`` is set) -4. The zipfile bundled with the IMASPy installation: ``assets/IDSDef.zip`` +4. 
The zipfile bundled with the imas-python installation: ``assets/IDSDef.zip`` All paths are searched in order when loading the definitions of a specific data dictionary version: the first zip file that contains the definitions of the requested diff --git a/docs/source/netcdf.rst b/docs/source/netcdf.rst index 7a7593e6..4ef62a2a 100644 --- a/docs/source/netcdf.rst +++ b/docs/source/netcdf.rst @@ -9,60 +9,60 @@ IMAS netCDF files netcdf/conventions -IMASPy supports reading IDSs from and writing IDSs to IMAS netCDF files. This +imas-python supports reading IDSs from and writing IDSs to IMAS netCDF files. This feature is currently in alpha status, and its functionality may change in -upcoming minor releases of IMASPy. +upcoming minor releases of imas-python. A detailed description of the IMAS netCDF format and conventions can be found on the :ref:`IMAS conventions for the netCDF data format` page. -Reading from and writing to netCDF files uses the same :py:class:`imaspy.DBEntry -` API as reading and writing to Access Layer backends. +Reading from and writing to netCDF files uses the same :py:class:`imas.DBEntry +` API as reading and writing to Access Layer backends. If you provide a path to a netCDF file (ending with ``.nc``) the netCDF backend -will be used for :py:meth:`~imaspy.db_entry.DBEntry.get` and -:py:meth:`~imaspy.db_entry.DBEntry.put` calls. See the below example: +will be used for :py:meth:`~imas.db_entry.DBEntry.get` and +:py:meth:`~imas.db_entry.DBEntry.put` calls. See the below example: .. 
code-block:: python :caption: Use DBEntry to write and read IMAS netCDF files - import imaspy + import imas - cp = imaspy.IDSFactory().core_profiles() - cp.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_INDEPENDENT + cp = imas.IDSFactory().core_profiles() + cp.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_INDEPENDENT cp.ids_properties.comment = "Test IDS" # This will create the `test.nc` file and stores the core_profiles IDS in it - with imaspy.DBEntry("test.nc", "w") as netcdf_entry: + with imas.DBEntry("test.nc", "w") as netcdf_entry: netcdf_entry.put(cp) # Reading back: - with imaspy.DBEntry("test.nc", "r") as netcdf_entry: + with imas.DBEntry("test.nc", "r") as netcdf_entry: cp2 = netcdf_entry.get("core_profiles") - imaspy.util.print_tree(cp2) + imas.util.print_tree(cp2) Using IMAS netCDF files with 3rd-party tools -------------------------------------------- -The netCDF files produces by IMASPy can be read with external tools. In this +The netCDF files produces by imas-python can be read with external tools. In this section we will show how to load data with the `xarray `__ package. Let's first create a small netCDF file in the current working directory based on -the IMASPy training data: +the imas-python training data: .. code-block:: python :caption: Store ``core_profiles`` training data in a netCDF file - import imaspy.training + import imas.training # Open the training entry - with imaspy.training.get_training_db_entry() as training_entry: + with imas.training.get_training_db_entry() as training_entry: # Load the core_profiles IDS core_profiles = training_entry.get("core_profiles") # Open a netCDF entry to store this IDS in: - with imaspy.DBEntry("core_profiles.nc", "w") as nc: + with imas.DBEntry("core_profiles.nc", "w") as nc: nc.put(core_profiles) If you execute this code snippet, you will find a file ``core_profiles.nc`` in @@ -107,6 +107,6 @@ your directory. 
Let's open this file with ``xarray.load_dataset``: Validating an IMAS netCDF file ------------------------------ -IMAS netCDF files can be validated with IMASPy through the command line ``imaspy -validate_nc ``. See also :ref:`IMASPy Command Line tool` or type -``imaspy validate_nc --help`` in a command line. +IMAS netCDF files can be validated with imas-python through the command line ``imas +validate_nc ``. See also :ref:`imas-python Command Line tool` or type +``imas validate_nc --help`` in a command line. diff --git a/docs/source/netcdf/conventions.rst b/docs/source/netcdf/conventions.rst index 99718b47..2dbbacb4 100644 --- a/docs/source/netcdf/conventions.rst +++ b/docs/source/netcdf/conventions.rst @@ -263,7 +263,7 @@ IMAS netCDF writers are recommended to overwrite the following metadata: - ``ids_properties.version_put.access_layer``: fill with ``"N/A"``, since this IDS is not written by the IMAS Access Layer. - ``ids_properties.version_put.access_layer_language``: fill with the name and - version of the netCDF writer, for example ``IMASPy 1.1.0``. + version of the netCDF writer, for example ``imas-python 1.1.0``. All other IDS metadata and provenance should be filled by the user or software that provides the IDS data. diff --git a/docs/source/release_imaspy.rst b/docs/source/release_imas.rst similarity index 72% rename from docs/source/release_imaspy.rst rename to docs/source/release_imas.rst index cc946ea4..4606118d 100644 --- a/docs/source/release_imaspy.rst +++ b/docs/source/release_imas.rst @@ -1,33 +1,26 @@ -IMASPy development and release process -====================================== +imas-python development and release process +=========================================== -IMASPy development follows the `Gitflow workflow -`_: +imas-python development follows the a fork-based model described in +`the contributing guidelines +`_. -1. New features, bug fixes, etc. are developed in a separate branch. 
Typically named - ``feature/``, ``bugfix/IMAS-XXXX``, etc. -2. When the feature is finished, a Pull Request to the ``develop`` branch is created. -3. The PR is reviewed and, after approval, changes are merged to ``develop``. -4. The ``main`` branch is updated only on releases, see below. +Creating an imas-python release +------------------------------- -Creating an IMASPy release --------------------------- - -1. Create a Pull Request from ``develop`` to ``main``. +1. Create a Pull Request using fork based workflow from ``develop`` to ``main``. 2. Add a change log to the Pull Request, briefly describing new features, bug fixes, - etc. See, for example, `this PR for version 0.8.0 - `_. -3. The PR is reviewed and merged by IO (currently Olivier Hoenen, who also creates the - release tags). + and update accordingly the :ref:`changelog`. +3. The PR is reviewed and merged by the maintainers who also create the release tags. 4. After the release PR is merged, update the Easybuild configurations for SDCC modules in the `easybuild-easyconfigs repository - `_. + `_. See the next section for more details on how to do this. -Updating and testing the IMASPy Easybuild configuration -------------------------------------------------------- +Updating and testing the imas-python Easybuild configuration +------------------------------------------------------------ The following steps can be taken on an SDCC login node. @@ -77,7 +70,7 @@ The following steps must be performed for each of the tool chains (currently a. Copy the ``.eb`` file from the previous release. b. Update the ``version`` to reflect the just-released version tag. - c. If any of the IMASPy dependencies in ``pyproject.toml`` where updated or changed + c. 
If any of the imas-python dependencies in ``pyproject.toml`` where updated or changed since the previous release, update the easybuild dependencies: - ``builddependencies`` contains build-time dependencies which are available @@ -86,7 +79,7 @@ The following steps must be performed for each of the tool chains (currently .. note:: The IMAS module is a build-time dependency only and not a runtime - dependency. This allows IMASPy users to load the IMASPy module and + dependency. This allows imas-python users to load the imas-python module and **any** supported IMAS module. - ``dependencies`` contains run-time dependencies which are available as a @@ -95,13 +88,13 @@ The following steps must be performed for each of the tool chains (currently dependencies of dependencies) which are not available in any of the Python modules on SDCC. - d. Update the checksum of imaspy: download an archive of the IMASPy repository from + d. Update the checksum of imas: download an archive of the imas-python repository from bitbucket. This is easiest to do by copying the following URL, replace ```` with the version tag, and paste it in a web browser: .. code-block:: text - https://git.iter.org/rest/api/latest/projects/IMAS/repos/imaspy/archive?at=refs/tags/&format=tar.gz + https://github.com/iterorganization/imas-python/archive/refs/heads/.tar.gz Then, calculate the hash of the downloaded archive with ``sha256sum`` and update it in the ``.eb`` file. @@ -127,7 +120,7 @@ The following steps must be performed for each of the tool chains (currently module purge module use ~/.local/easybuild/modules/all/ - module load IMASPy/- + module load imas-python/- module laod IMAS c. Sanity check the module, for example by running the ``pytest`` unit tests. diff --git a/docs/source/resampling.rst b/docs/source/resampling.rst index d788d5ad..22acb798 100644 --- a/docs/source/resampling.rst +++ b/docs/source/resampling.rst @@ -14,14 +14,14 @@ find the value of new points. This can be used like so: .. 
code-block:: python - pulse_schedule = imaspy.IDSFactory().new("pulse_schedule") + pulse_schedule = imas.IDSFactory().new("pulse_schedule") f = scipy.interpolate.interp1d(pulse_schedule.time, pulse_schedule_some_1d_var) ids.pulse_schedule.some_1d_var = f(pulse_schedule.some_1d_var) A more general approach would work on the basis of scanning the tree for shared coordinates, and resampling those in the same manner (by creating a -local interpolator and applying it). The :py:meth:`imaspy.util.visit_children` +local interpolator and applying it). The :py:meth:`imas.util.visit_children` method can be used for this. For a proof-of-concept it is recommended to only resample in the time direction. @@ -31,15 +31,15 @@ For example, a proposal implementation included in 0.4.0 can be used as such .. code-block:: python - import imaspy - nbi = imaspy.IDSFactory().new("nbi") - nbi.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + import imas + nbi = imas.IDSFactory().new("nbi") + nbi.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS nbi.time = [1, 2, 3] nbi.unit.resize(1) nbi.unit[0].energy.data = 2 * nbi.time old_id = id(nbi.unit[0].energy.data) - imaspy.util.resample( + imas.util.resample( nbi.unit[0].energy.data, nbi.time, [0.5, 1.5], @@ -56,14 +56,14 @@ Or as such (explicit in-memory copy + interpolation, producing a new data leaf/c .. 
code-block:: python - nbi = imaspy.IDSFactory().new("nbi") - nbi.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + nbi = imas.IDSFactory().new("nbi") + nbi.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS nbi.time = [1, 2, 3] nbi.unit.resize(1) nbi.unit[0].energy.data = 2 * nbi.time old_id = id(nbi.unit[0].energy.data) - new_data = imaspy.util.resample( + new_data = imas.util.resample( nbi.unit[0].energy.data, nbi.time, [0.5, 1.5], diff --git a/docs/source/validation.rst b/docs/source/validation.rst index 880c3402..472c70ca 100644 --- a/docs/source/validation.rst +++ b/docs/source/validation.rst @@ -3,30 +3,30 @@ IDS validation ============== -The IDSs you fill should be consistent. To help you in validating that, IMASPy has a -:py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate` method that executes the following +The IDSs you fill should be consistent. To help you in validating that, imas-python has a +:py:meth:`~imas.ids_toplevel.IDSToplevel.validate` method that executes the following checks. .. contents:: Validation checks :local: :depth: 1 -If you call this method and your IDS fails validation, IMASPy raises an error explaining +If you call this method and your IDS fails validation, imas-python raises an error explaining the problem. See the following example: ->>> import imaspy ->>> core_profiles = imaspy.IDSFactory().core_profiles() +>>> import imas +>>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.validate() -imaspy.exception.ValidationError: Invalid value for ids_properties.homogeneous_time: -999999999 +imas.exception.ValidationError: Invalid value for ids_properties.homogeneous_time: -999999999 -IMASPy also automatically validates an IDS every time you do a -:py:meth:`~imaspy.db_entry.DBEntry.put` or -:py:meth:`~imaspy.db_entry.DBEntry.put_slice`. 
To disable this feature, you must set the +imas-python also automatically validates an IDS every time you do a +:py:meth:`~imas.db_entry.DBEntry.put` or +:py:meth:`~imas.db_entry.DBEntry.put_slice`. To disable this feature, you must set the environment variable ``IMAS_AL_DISABLE_VALIDATE`` to ``1``. .. seealso:: - API documentation: :py:meth:`IDSToplevel.validate() ` + API documentation: :py:meth:`IDSToplevel.validate() ` Validate the time mode diff --git a/imas/__init__.py b/imas/__init__.py new file mode 100644 index 00000000..b0b8f567 --- /dev/null +++ b/imas/__init__.py @@ -0,0 +1,31 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. + +# isort: skip_file + +from packaging.version import Version as _V + +from ._version import version as __version__ # noqa: F401 +from ._version import version_tuple # noqa: F401 + +# Import logging _first_ +from . import setup_logging + +# Import main user API objects in the imas module +from .db_entry import DBEntry +from .ids_factory import IDSFactory +from .ids_convert import convert_ids +from .ids_identifiers import identifiers + +# Load the imas-python IMAS AL/DD core +from . import ( + db_entry, + dd_helpers, + dd_zip, + util, +) + +PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/" +"""URL to the published documentation.""" +OLDEST_SUPPORTED_VERSION = _V("3.22.0") +"""Oldest Data Dictionary version that is supported by imas-python.""" diff --git a/imas/__main__.py b/imas/__main__.py new file mode 100644 index 00000000..0b7834e5 --- /dev/null +++ b/imas/__main__.py @@ -0,0 +1,17 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Support module to run imas as a module: + +.. 
code-block:: bash + :caption: Options to run imas CLI interface + + # Run as a module (implemented in imas/__main__.py) + python -m imas + + # Run as "program" (see project.scripts in pyproject.toml) + imas +""" + +from imas.command.cli import cli + +cli() diff --git a/imaspy/_util.py b/imas/_util.py similarity index 94% rename from imaspy/_util.py rename to imas/_util.py index 44aee89e..82199275 100644 --- a/imaspy/_util.py +++ b/imas/_util.py @@ -1,8 +1,8 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """This file contains the implementation of all utility functions that need external modules. Implementation has been removed from util.py to improve the performance of -``import imaspy``. +``import imas``. """ import copy @@ -21,15 +21,15 @@ from rich.text import Text from rich.tree import Tree -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_primitive import IDSPrimitive -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel -from imaspy.util import idsdiffgen, visit_children +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_metadata import IDSMetadata +from imas.ids_primitive import IDSPrimitive +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel +from imas.util import idsdiffgen, visit_children logger = logging.getLogger(__name__) diff --git a/imaspy/assets/IDSDef2MDSpreTree.xsl b/imas/assets/IDSDef2MDSpreTree.xsl similarity index 100% rename from 
imaspy/assets/IDSDef2MDSpreTree.xsl rename to imas/assets/IDSDef2MDSpreTree.xsl diff --git a/imaspy/assets/IDS_fake_toplevel.xml b/imas/assets/IDS_fake_toplevel.xml similarity index 100% rename from imaspy/assets/IDS_fake_toplevel.xml rename to imas/assets/IDS_fake_toplevel.xml diff --git a/imaspy/assets/IDS_minimal.xml b/imas/assets/IDS_minimal.xml similarity index 95% rename from imaspy/assets/IDS_minimal.xml rename to imas/assets/IDS_minimal.xml index 32d94e34..01764e95 100644 --- a/imaspy/assets/IDS_minimal.xml +++ b/imas/assets/IDS_minimal.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imaspy/assets/IDS_minimal_2.xml b/imas/assets/IDS_minimal_2.xml similarity index 95% rename from imaspy/assets/IDS_minimal_2.xml rename to imas/assets/IDS_minimal_2.xml index 9f38f5a4..57a90d23 100644 --- a/imaspy/assets/IDS_minimal_2.xml +++ b/imas/assets/IDS_minimal_2.xml @@ -3,7 +3,7 @@ 0.0.2 diff --git a/imaspy/assets/IDS_minimal_struct_array.xml b/imas/assets/IDS_minimal_struct_array.xml similarity index 96% rename from imaspy/assets/IDS_minimal_struct_array.xml rename to imas/assets/IDS_minimal_struct_array.xml index 5d644ee0..72845315 100644 --- a/imaspy/assets/IDS_minimal_struct_array.xml +++ b/imas/assets/IDS_minimal_struct_array.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imaspy/assets/IDS_minimal_types.xml b/imas/assets/IDS_minimal_types.xml similarity index 98% rename from imaspy/assets/IDS_minimal_types.xml rename to imas/assets/IDS_minimal_types.xml index 56cbbc57..d939aa32 100644 --- a/imaspy/assets/IDS_minimal_types.xml +++ b/imas/assets/IDS_minimal_types.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imaspy/assets/ITER_134173_106_core_profiles.ids b/imas/assets/ITER_134173_106_core_profiles.ids similarity index 100% rename from imaspy/assets/ITER_134173_106_core_profiles.ids rename to imas/assets/ITER_134173_106_core_profiles.ids diff --git a/imaspy/assets/ITER_134173_106_equilibrium.ids b/imas/assets/ITER_134173_106_equilibrium.ids similarity index 100% rename from 
imaspy/assets/ITER_134173_106_equilibrium.ids rename to imas/assets/ITER_134173_106_equilibrium.ids diff --git a/imaspy/assets/README.md b/imas/assets/README.md similarity index 100% rename from imaspy/assets/README.md rename to imas/assets/README.md diff --git a/imaspy/assets/core_profiles.ids b/imas/assets/core_profiles.ids similarity index 100% rename from imaspy/assets/core_profiles.ids rename to imas/assets/core_profiles.ids diff --git a/imaspy/assets/equilibrium.ids b/imas/assets/equilibrium.ids similarity index 100% rename from imaspy/assets/equilibrium.ids rename to imas/assets/equilibrium.ids diff --git a/imaspy/backends/__init__.py b/imas/backends/__init__.py similarity index 76% rename from imaspy/backends/__init__.py rename to imas/backends/__init__.py index 5fa32445..78cdd3f5 100644 --- a/imaspy/backends/__init__.py +++ b/imas/backends/__init__.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interacting with all data backends. Currently supported backends are: diff --git a/imaspy/backends/db_entry_impl.py b/imas/backends/db_entry_impl.py similarity index 83% rename from imaspy/backends/db_entry_impl.py rename to imas/backends/db_entry_impl.py index bc8ca10d..7e5dddef 100644 --- a/imaspy/backends/db_entry_impl.py +++ b/imas/backends/db_entry_impl.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
from abc import ABC, abstractmethod from dataclasses import dataclass @@ -7,9 +7,9 @@ import numpy -from imaspy.ids_convert import NBCPathMap -from imaspy.ids_factory import IDSFactory -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_convert import NBCPathMap +from imas.ids_factory import IDSFactory +from imas.ids_toplevel import IDSToplevel @dataclass @@ -17,9 +17,9 @@ class GetSliceParameters: """Helper class to store parameters to get_slice.""" time_requested: float - """See :param:`imaspy.db_entry.DBEntry.get_slice.time_requested`.""" + """See :param:`imas.db_entry.DBEntry.get_slice.time_requested`.""" interpolation_method: int - """See :param:`imaspy.db_entry.DBEntry.get_slice.interpolation_method`.""" + """See :param:`imas.db_entry.DBEntry.get_slice.interpolation_method`.""" @dataclass @@ -27,13 +27,13 @@ class GetSampleParameters: """Helper class to store parameters to get_sample.""" tmin: float - """See :param:`imaspy.db_entry.DBEntry.get_sample.tmin`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.tmin`.""" tmax: float - """See :param:`imaspy.db_entry.DBEntry.get_sample.tmax`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.tmax`.""" dtime: Optional[numpy.ndarray] - """See :param:`imaspy.db_entry.DBEntry.get_sample.dtime`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.dtime`.""" interpolation_method: Optional[int] - """See :param:`imaspy.db_entry.DBEntry.get_sample.interpolation_method`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.interpolation_method`.""" class DBEntryImpl(ABC): diff --git a/imas/backends/imas_core/__init__.py b/imas/backends/imas_core/__init__.py new file mode 100644 index 00000000..5e7812f4 --- /dev/null +++ b/imas/backends/imas_core/__init__.py @@ -0,0 +1,4 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Subpackage implementing data access through the IMAS Access Layer Core. 
+""" diff --git a/imaspy/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py similarity index 95% rename from imaspy/backends/imas_core/al_context.py rename to imas/backends/imas_core/al_context.py index 5d782fda..323cdd5d 100644 --- a/imaspy/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Object-oriented interface to the IMAS lowlevel. """ @@ -10,9 +10,9 @@ import numpy -from imaspy.backends.imas_core.imas_interface import ll_interface -from imaspy.exception import LowlevelError -from imaspy.ids_defs import ( +from imas.backends.imas_core.imas_interface import ll_interface +from imas.exception import LowlevelError +from imas.ids_defs import ( CLOSEST_INTERP, LINEAR_INTERP, PREVIOUS_INTERP, @@ -27,8 +27,8 @@ ) if TYPE_CHECKING: - from imaspy.backends.imas_core.db_entry_al import ALDBEntryImpl - from imaspy.ids_convert import NBCPathMap + from imas.backends.imas_core.db_entry_al import ALDBEntryImpl + from imas.ids_convert import NBCPathMap logger = logging.getLogger(__name__) @@ -221,7 +221,7 @@ class LazyALContext: """Replacement for ALContext that is used during lazy loading. This class implements ``global_action``, ``slice_action`` and ``read_data``, such - that it can be used as a drop-in replacement in ``imaspy.db_entry._get_children`` + that it can be used as a drop-in replacement in ``imas.db_entry._get_children`` and only custom logic is needed for IDSStructArray there. This class tracks: @@ -233,7 +233,7 @@ class LazyALContext: arraystruct_action!). - The ALContext method and arguments that we need to call on the ALContext we obtain from our parent, to obtain the actual ALContext we should use for loading data. 
- - The NBC map that ``imaspy.db_entry._get_children`` needs when lazy loading + - The NBC map that ``imas.db_entry._get_children`` needs when lazy loading children of an IDSStructArray. When constructing a LazyALContext, you need to supply either the ``dbentry`` and @@ -298,7 +298,7 @@ def get_context(self) -> ALContext: if not cache or cache[-1] is not ctx: logger.warning( "Found an empty AL context cache: This should not happen, please " - "report this bug to the IMASPy developers." + "report this bug to the imas-python developers." ) else: return ctx diff --git a/imaspy/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py similarity index 96% rename from imaspy/backends/imas_core/db_entry_al.py rename to imas/backends/imas_core/db_entry_al.py index 34a3ab32..e1d711a5 100644 --- a/imaspy/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -8,11 +8,11 @@ from typing import Any, Deque, List, Optional, Union from urllib.parse import urlparse -from imaspy.backends.db_entry_impl import GetSampleParameters, GetSliceParameters -from imaspy.db_entry import DBEntryImpl -from imaspy.exception import DataEntryException, LowlevelError -from imaspy.ids_convert import NBCPathMap, dd_version_map_from_factories -from imaspy.ids_defs import ( +from imas.backends.db_entry_impl import GetSampleParameters, GetSliceParameters +from imas.db_entry import DBEntryImpl +from imas.exception import DataEntryException, LowlevelError +from imas.ids_convert import NBCPathMap, dd_version_map_from_factories +from imas.ids_defs import ( ASCII_BACKEND, CHAR_DATA, CLOSE_PULSE, @@ -33,9 +33,9 @@ UNDEFINED_TIME, WRITE_OP, ) -from imaspy.ids_factory import IDSFactory -from imaspy.ids_metadata import IDSType -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_factory import IDSFactory +from imas.ids_metadata import IDSType +from imas.ids_toplevel import IDSToplevel from .al_context import ALContext, LazyALContext from .db_entry_helpers import 
delete_children, get_children, put_children @@ -206,7 +206,8 @@ def _setup_backend( os.environ["IDSDEF_PATH"] = idsdef_path logger.warning( - "The UDA backend is not tested with IMASPy and may not work properly. " + "The UDA backend is not tested with " + "imas-python and may not work properly. " "Please raise any issues you find." ) @@ -214,7 +215,7 @@ def _setup_backend( pass # nothing to set up else: - logger.warning("Backend %s is unknown to IMASPy", backend) + logger.warning("Backend %s is unknown to imas-python", backend) def close(self, *, erase: bool = False) -> None: if self._db_ctx is None: diff --git a/imaspy/backends/imas_core/db_entry_helpers.py b/imas/backends/imas_core/db_entry_helpers.py similarity index 94% rename from imaspy/backends/imas_core/db_entry_helpers.py rename to imas/backends/imas_core/db_entry_helpers.py index f69eafd3..d6e3a596 100644 --- a/imaspy/backends/imas_core/db_entry_helpers.py +++ b/imas/backends/imas_core/db_entry_helpers.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Helper methods for loading data from and storing data to Data Entries. 
""" @@ -7,13 +7,13 @@ import numpy as np -from imaspy.ids_base import IDSBase -from imaspy.ids_convert import NBCPathMap -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS, IDS_TIME_MODE_INDEPENDENT -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure +from imas.ids_base import IDSBase +from imas.ids_convert import NBCPathMap +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS, IDS_TIME_MODE_INDEPENDENT +from imas.ids_metadata import IDSMetadata +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure from .al_context import ALContext, LazyALContext diff --git a/imaspy/backends/imas_core/imas_interface.py b/imas/backends/imas_core/imas_interface.py similarity index 87% rename from imaspy/backends/imas_core/imas_interface.py rename to imas/backends/imas_core/imas_interface.py index b92438b1..05634dfb 100644 --- a/imaspy/backends/imas_core/imas_interface.py +++ b/imas/backends/imas_core/imas_interface.py @@ -1,15 +1,13 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """ Helper module for providing a version-independent interface to the Access Layer. This module tries to abstract away most API incompatibilities between the supported Access Layer versions (for example the rename of _ual_lowlevel to _al_lowlevel). 
""" -import importlib import inspect import logging -import time from packaging.version import Version @@ -28,30 +26,15 @@ if enable_exceptions: enable_exceptions() -except ImportError: - # Fallback for AL 4.x or 5.0/5.1 - try: - tic = time.time() - # Don't directly `import imas`: code analyzers will break on the huge code base - imas = importlib.import_module("imas") - logger.info( - "Successfully imported `imas` (took %.3f seconds)", time.time() - tic - ) - try: - lowlevel = imas._al_lowlevel # AL 5.0/5.1 - except AttributeError: - lowlevel = imas._ual_lowlevel # AL 4.x - imasdef = imas.imasdef - - except ImportError as exc: - imas = None - has_imas = False - imasdef = None - lowlevel = None - logger.critical( - "Could not import 'imas': %s. Some functionality is not available.", - exc, - ) +except ImportError as exc: + imas = None + has_imas = False + imasdef = None + lowlevel = None + logger.critical( + "Could not import 'al_core': %s. Some functionality is not available.", + exc, + ) class LLInterfaceError(RuntimeError): @@ -61,8 +44,8 @@ class LLInterfaceError(RuntimeError): class LowlevelInterface: """Compatibility object. - Provides a stable API for the rest of IMASPy even when the `imas.lowlevel` interface - changes. + Provides a stable API for the rest of imas-python even when the + `imas.lowlevel` interface changes. .. 
rubric:: Developer notes @@ -230,4 +213,4 @@ def begin_timerange_action( func.__doc__ = f"Wrapper function for AL lowlevel method ``{funcname}``" ll_interface = LowlevelInterface(lowlevel) -"""IMASPy <-> IMAS lowlevel interface""" +"""imas-python <-> IMAS lowlevel interface""" diff --git a/imaspy/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py similarity index 83% rename from imaspy/backends/imas_core/mdsplus_model.py rename to imas/backends/imas_core/mdsplus_model.py index 4d96bf6c..9b00f34c 100644 --- a/imaspy/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -1,5 +1,5 @@ # Helper functions to create MDSPlus reference models -# and store them in a cache directory (.cache/imaspy/MDSPlus/name-HASH/) +# and store them in a cache directory (.cache/imas/MDSPlus/name-HASH/) """Module for generating and working with MDSplus models. """ @@ -13,6 +13,7 @@ import time import uuid from pathlib import Path +from saxonche import PySaxonProcessor from subprocess import CalledProcessError, check_output from zlib import crc32 @@ -21,11 +22,9 @@ except ImportError: # Python 3.8 support from importlib_resources import as_file, files -import imaspy -from imaspy.dd_helpers import get_saxon -from imaspy.dd_zip import get_dd_xml, get_dd_xml_crc -from imaspy.exception import MDSPlusModelError -from imaspy.ids_factory import IDSFactory +from imas.dd_zip import get_dd_xml, get_dd_xml_crc +from imas.exception import MDSPlusModelError +from imas.ids_factory import IDSFactory logger = logging.getLogger(__name__) @@ -86,12 +85,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: Given a filename and xml contents create an xml - document for the mdsplus model by running a command like the below: - - java net.sf.saxon.Transform -s:- -xsl: -o:${OUTPUT_FILE} - - with ENV: - env={"CLASSPATH": saxon_jar_path, "PATH": os.environ.get("PATH", "")} + document for the mdsplus model by using saxonche Args: factory: IDSFactory
indicating the DD version / XML to build models for. @@ -117,14 +111,14 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: crc = crc32(file.read()) cache_dir_name = "%s-%08x" % (xml_name, crc) - cache_dir_path = Path(_get_xdg_cache_dir()) / "imaspy" / "mdsplus" / cache_dir_name + cache_dir_path = Path(_get_xdg_cache_dir()) / "imas" / "mdsplus" / cache_dir_name # TODO: include hash or version of "IDSDef2MDSpreTree.xsl", which we should fetch # from the access layer instead of provide ourselves, if we wish to be resilient to # upgrades there (has happened early 2021 already once). of course, upgrades to the # on-disk formats should be versioned and documented properly, so this should never # happen again. - # There are multiple possible cases for the IMASPy cache + # There are multiple possible cases for the imas-python cache # 1. The cache exist and can be used # 2. The cache folder exists, and another process is creating it # 3. The cache folder exists, but the process creating it has stopped @@ -138,7 +132,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: tmp_cache_dir_path = ( Path(tempfile.gettempdir()) / getpass.getuser() - / "imaspy" + / "imas" / "mdsplus" / f"{cache_dir_name}_{fuuid}" ) @@ -164,7 +158,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: os.listdir(cache_dir_path), ) raise MDSPlusModelError( - "The IMASPy cache directory is corrupted. Please clean the" + "The imas-python cache directory is corrupted. Please clean the" f" cache directory ({cache_dir_path}) and try again." 
) elif not cache_dir_path.is_dir() and not model_exists(cache_dir_path): @@ -242,30 +236,50 @@ def model_exists(path: Path) -> bool: def create_model_ids_xml(cache_dir_path, fname, version): - """Use saxon to compile an ids.xml suitable for creating an mdsplus model.""" - + """Use Saxon/C to compile an ids.xml suitable for creating an MDSplus model.""" try: - # we have to be careful to have the same version of this file as in the access - # layer: - with as_file(files(imaspy) / "assets" / "IDSDef2MDSpreTree.xsl") as xslfile: - check_output( - [ - "java", - "net.sf.saxon.Transform", - "-s:" + str(fname), - "-o:" + str(Path(cache_dir_path) / "ids.xml"), - "DD_GIT_DESCRIBE=" + str(version or fname), - # if this is expected as git describe it might break - # if we just pass a filename - "AL_GIT_DESCRIBE=" + os.environ.get("AL_VERSION", "0.0.0"), - "-xsl:" + str(xslfile), - ], - input=get_dd_xml(version) if version else None, - env={"CLASSPATH": get_saxon(), "PATH": os.environ.get("PATH", "")}, - ) - except CalledProcessError as e: + with as_file(files("imas") / "assets" / "IDSDef2MDSpreTree.xsl") as xslfile: + output_file = Path(cache_dir_path) / "ids.xml" + + with PySaxonProcessor(license=False) as proc: + xslt_processor = proc.new_xslt30_processor() + + xslt_processor.compile_stylesheet(stylesheet_file=str(xslfile)) + + input_xml = get_dd_xml(version) if version else None + if fname: + source_file = str(fname) + elif input_xml: + source_file = input_xml # Use standard input for the XML string + else: + raise ValueError( + "Either 'fname' or 'version' must be provided to generate XML." 
+ ) + + # xdm_ddgit = proc.make_string_value(str(version or fname)) + # xsltproc.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit) + # xdm_algit = proc.make_string_value(os.environ.get + # ("AL_VERSION", "0.0.0")) + # xsltproc.set_parameter("AL_GIT_DESCRIBE", xdm_algit) + # Transform XML + result = xslt_processor.transform_to_file( + source_file=source_file, + output_file=str(output_file), + initial_template_params={ + "DD_GIT_DESCRIBE": str(version or fname), + "AL_GIT_DESCRIBE": os.environ.get("AL_VERSION", "0.0.0"), + }, + ) + + if result is False: + logger.error( + "Transformation failed: Check Saxon/C logs for details." + ) + raise RuntimeError("Saxon/C XSLT transformation failed.") + + except Exception as e: if fname: - logger.error("Error making MDSPlus model IDS.xml for %s", fname) + logger.error("Error making MDSplus model IDS.xml for %s", fname) else: logger.error("Error making MDSplus model IDS.xml for %s", version) raise e diff --git a/imaspy/backends/imas_core/uda_support.py b/imas/backends/imas_core/uda_support.py similarity index 86% rename from imaspy/backends/imas_core/uda_support.py rename to imas/backends/imas_core/uda_support.py index 8b599faa..f051f549 100644 --- a/imaspy/backends/imas_core/uda_support.py +++ b/imas/backends/imas_core/uda_support.py @@ -3,7 +3,7 @@ from typing import Union from xml.etree import ElementTree as ET -from imaspy import dd_zip +from imas import dd_zip from .mdsplus_model import _get_xdg_cache_dir @@ -31,11 +31,11 @@ def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> str: def extract_idsdef(dd_version: str) -> str: """Extract the IDSDef.xml for the given version and return its path. 
- The IDSDef.xml is extracted to the imaspy cache folder: + The IDSDef.xml is extracted to the imas cache folder: - - If the file imaspy/uda/.xml already exists, we assume it is correct + - If the file imas/uda/.xml already exists, we assume it is correct """ - cache_dir_path = Path(_get_xdg_cache_dir()) / "imaspy" / "uda" + cache_dir_path = Path(_get_xdg_cache_dir()) / "imas" / "uda" cache_dir_path.mkdir(parents=True, exist_ok=True) # ensure cache folder exists idsdef_path = cache_dir_path / (dd_version + ".xml") diff --git a/imas/backends/netcdf/__init__.py b/imas/backends/netcdf/__init__.py new file mode 100644 index 00000000..86cc929e --- /dev/null +++ b/imas/backends/netcdf/__init__.py @@ -0,0 +1,4 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""NetCDF IO support for imas-python. Requires [netcdf] extra dependencies. +""" diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imas/backends/netcdf/db_entry_nc.py similarity index 93% rename from imaspy/backends/netcdf/db_entry_nc.py rename to imas/backends/netcdf/db_entry_nc.py index 732eb97d..8e37c464 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imas/backends/netcdf/db_entry_nc.py @@ -3,17 +3,17 @@ import logging from typing import List, Optional, Union -from imaspy.backends.db_entry_impl import ( +from imas.backends.db_entry_impl import ( DBEntryImpl, GetSampleParameters, GetSliceParameters, ) -from imaspy.backends.netcdf.ids2nc import IDS2NC -from imaspy.backends.netcdf.nc2ids import NC2IDS -from imaspy.exception import DataEntryException, InvalidNetCDFEntry -from imaspy.ids_convert import NBCPathMap, convert_ids -from imaspy.ids_factory import IDSFactory -from imaspy.ids_toplevel import IDSToplevel +from imas.backends.netcdf.ids2nc import IDS2NC +from imas.backends.netcdf.nc2ids import NC2IDS +from imas.exception import DataEntryException, InvalidNetCDFEntry +from imas.ids_convert import NBCPathMap, convert_ids +from 
imas.ids_factory import IDSFactory +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -31,7 +31,7 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: if netCDF4 is None: raise RuntimeError( "The `netCDF4` python module is not available. Please install this " - "module to read/write IMAS netCDF files with IMASPy." + "module to read/write IMAS netCDF files with imas-python." ) self._dataset = netCDF4.Dataset( diff --git a/imaspy/backends/netcdf/ids2nc.py b/imas/backends/netcdf/ids2nc.py similarity index 96% rename from imaspy/backends/netcdf/ids2nc.py rename to imas/backends/netcdf/ids2nc.py index 34e63101..e56c32a0 100644 --- a/imaspy/backends/netcdf/ids2nc.py +++ b/imas/backends/netcdf/ids2nc.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""NetCDF IO support for IMASPy. Requires [netcdf] extra dependencies. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""NetCDF IO support for imas-python. Requires [netcdf] extra dependencies. 
""" from typing import Iterator, Tuple @@ -8,13 +8,13 @@ import netCDF4 import numpy -from imaspy.backends.netcdf.nc_metadata import NCMetadata -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel default_fillvals = { IDSDataType.INT: netCDF4.default_fillvals["i4"], diff --git a/imaspy/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py similarity index 95% rename from imaspy/backends/netcdf/nc2ids.py rename to imas/backends/netcdf/nc2ids.py index 50905ba8..50668dfb 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -4,15 +4,15 @@ import netCDF4 -from imaspy.backends.netcdf import ids2nc -from imaspy.backends.netcdf.nc_metadata import NCMetadata -from imaspy.exception import InvalidNetCDFEntry -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.backends.netcdf import ids2nc +from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.exception import InvalidNetCDFEntry +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_metadata import IDSMetadata +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel logger = 
logging.getLogger(__name__) @@ -164,7 +164,7 @@ def run(self) -> None: def validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" - disable_validate = os.environ.get("IMASPY_DISABLE_NC_VALIDATE") + disable_validate = os.environ.get("IMAS_DISABLE_NC_VALIDATE") if disable_validate and disable_validate != "0": logger.info( "NetCDF file validation disabled: " diff --git a/imaspy/backends/netcdf/nc_metadata.py b/imas/backends/netcdf/nc_metadata.py similarity index 98% rename from imaspy/backends/netcdf/nc_metadata.py rename to imas/backends/netcdf/nc_metadata.py index 06b71e3e..50545f8c 100644 --- a/imaspy/backends/netcdf/nc_metadata.py +++ b/imas/backends/netcdf/nc_metadata.py @@ -1,14 +1,14 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """NetCDF metadata for dimensions and tensorization of IDSs. """ from functools import lru_cache from typing import Dict, List, Optional, Set, Tuple -from imaspy.ids_coordinates import IDSCoordinate -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_metadata import IDSMetadata +from imas.ids_coordinates import IDSCoordinate +from imas.ids_data_type import IDSDataType +from imas.ids_metadata import IDSMetadata def _get_aos_label_coordinates(metadata: IDSMetadata) -> List[str]: @@ -153,7 +153,7 @@ def _parse_dimensions(self, metadata: IDSMetadata, aos_level: int) -> None: """Parse dimensions and auxiliary coordinates from DD coordinate metadata. DD coordinates come in different flavours (see also - :mod:`imaspy.ids_coordinates`), which we handle in this function: + :mod:`imas.ids_coordinates`), which we handle in this function: 1. Coordinate is an index. 
diff --git a/imaspy/backends/netcdf/nc_validate.py b/imas/backends/netcdf/nc_validate.py similarity index 91% rename from imaspy/backends/netcdf/nc_validate.py rename to imas/backends/netcdf/nc_validate.py index 49a14283..55dbbf2b 100644 --- a/imaspy/backends/netcdf/nc_validate.py +++ b/imas/backends/netcdf/nc_validate.py @@ -1,7 +1,7 @@ -from imaspy.backends.netcdf.db_entry_nc import NCDBEntryImpl -from imaspy.backends.netcdf.nc2ids import NC2IDS -from imaspy.db_entry import DBEntry -from imaspy.exception import InvalidNetCDFEntry +from imas.backends.netcdf.db_entry_nc import NCDBEntryImpl +from imas.backends.netcdf.nc2ids import NC2IDS +from imas.db_entry import DBEntry +from imas.exception import InvalidNetCDFEntry def validate_netcdf_file(filename: str) -> None: diff --git a/imaspy/command/cli.py b/imas/command/cli.py similarity index 83% rename from imaspy/command/cli.py rename to imas/command/cli.py index f894f02d..565262ee 100644 --- a/imaspy/command/cli.py +++ b/imas/command/cli.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
""" Main CLI entry point """ import logging @@ -20,14 +20,14 @@ ) from rich.table import Table -import imaspy -import imaspy.backends.imas_core.imas_interface -from imaspy import DBEntry, dd_zip -from imaspy.backends.imas_core.imas_interface import ll_interface -from imaspy.command.db_analysis import analyze_db, process_db_analysis -from imaspy.command.helpers import min_version_guard, setup_rich_log_handler -from imaspy.command.timer import Timer -from imaspy.exception import UnknownDDVersion +import imas +import imas.backends.imas_core.imas_interface +from imas import DBEntry, dd_zip +from imas.backends.imas_core.imas_interface import ll_interface +from imas.command.db_analysis import analyze_db, process_db_analysis +from imas.command.helpers import min_version_guard, setup_rich_log_handler +from imas.command.timer import Timer +from imas.exception import UnknownDDVersion logger = logging.getLogger(__name__) @@ -42,14 +42,14 @@ def _excepthook(type_, value, tb): console.Console(stderr=True).print(rich_tb) -@click.group("imaspy", invoke_without_command=True, no_args_is_help=True) +@click.group("imas", invoke_without_command=True, no_args_is_help=True) def cli(): - """IMASPy command line interface. + """imas-python command line interface. Please use one of the available commands listed below. 
You can get help for each command by executing: - imaspy --help + imas --help """ # Limit the traceback to 1 item: avoid scaring CLI users with long traceback prints # and let them focus on the actual error message @@ -62,19 +62,24 @@ def cli(): @cli.command("version") def print_version(): - """Print version information of IMASPy.""" + """Print version information of imas-python.""" cons = console.Console() - grid = Table(title="IMASPy version info", show_header=False, title_style="bold") + grid = Table( + title="imas-python version info", show_header=False, title_style="bold" + ) grid.box = box.HORIZONTALS if cons.size.width > 120: grid.width = 120 - grid.add_row("IMASPy version:", imaspy.__version__) + grid.add_row("imas-python version:", imas.__version__) grid.add_section() - grid.add_row("Default data dictionary version:", imaspy.IDSFactory().dd_version) - dd_versions = ", ".join(imaspy.dd_zip.dd_xml_versions()) + grid.add_row("Default data dictionary version:", imas.IDSFactory().dd_version) + dd_versions = ", ".join(imas.dd_zip.dd_xml_versions()) grid.add_row("Available data dictionary versions:", dd_versions) grid.add_section() - grid.add_row("Access Layer core version:", ll_interface.get_al_version() or "N/A") + try: + grid.add_row("Access Layer core version:", ll_interface.get_al_version()) + except Exception: + grid.add_row("Access Layer core version:", "N/A") console.Console().print(grid) @@ -102,7 +107,7 @@ def print_ids(uri, ids, occurrence, print_all): with DBEntry(uri, "r") as dbentry: ids_obj = dbentry.get(ids, occurrence, autoconvert=False) - imaspy.util.print_tree(ids_obj, not print_all) + imas.util.print_tree(ids_obj, not print_all) @cli.command("convert", no_args_is_help=True) @@ -131,7 +136,7 @@ def convert_ids( Provide a different backend to URI_OUT than URI_IN to convert between backends. 
For example: - imaspy convert imas:mdsplus?path=db-in 3.41.0 imas:hdf5?path=db-out + imas convert imas:mdsplus?path=db-in 3.41.0 imas:hdf5?path=db-out \b uri_in URI of the input Data Entry. @@ -198,7 +203,7 @@ def convert_ids( ids2 = ids else: with timer("Convert", name): - ids2 = imaspy.convert_ids( + ids2 = imas.convert_ids( ids, None, factory=entry_out.factory, @@ -222,7 +227,7 @@ def convert_ids( @click.argument("filename", type=click.Path(exists=True, dir_okay=False)) def validate_nc(filename): """Validate if the provided netCDF file adheres to the IMAS conventions.""" - from imaspy.backends.netcdf.nc_validate import validate_netcdf_file + from imas.backends.netcdf.nc_validate import validate_netcdf_file try: validate_netcdf_file(filename) diff --git a/imaspy/command/db_analysis.py b/imas/command/db_analysis.py similarity index 96% rename from imaspy/command/db_analysis.py rename to imas/command/db_analysis.py index e687f94e..5cc946dc 100644 --- a/imaspy/command/db_analysis.py +++ b/imas/command/db_analysis.py @@ -1,4 +1,4 @@ -"""IMASPy-based command line tool for analysing fields in a database.""" +"""imas-python-based command line tool for analysing fields in a database.""" import gzip import json @@ -19,9 +19,9 @@ import rich.text import rich.tree -import imaspy -from imaspy.command.helpers import setup_rich_log_handler -from imaspy.ids_metadata import IDSMetadata +import imas +from imas.command.helpers import setup_rich_log_handler +from imas.ids_metadata import IDSMetadata directory_path = click.Path(exists=True, file_okay=False, path_type=Path) outfile_path = click.Path(dir_okay=False, writable=True, path_type=Path) @@ -36,7 +36,7 @@ "--output", "-o", type=outfile_path, - default="imaspy-db-analysis.json.gz", + default="imas-db-analysis.json.gz", help="Output file", ) def analyze_db(dbentry: Iterable[Path], output: Path) -> None: @@ -60,7 +60,7 @@ def analyze_db(dbentry: Iterable[Path], output: Path) -> None: data, the IDSs are inspected by looking at 
the HDF5 files directly. 2. This tool uses the optional `h5py` dependency. An error is raised when this package is not available. - 3. If your data is stored in another format than HDF5, you may use `imaspy convert` + 3. If your data is stored in another format than HDF5, you may use `imas convert` to convert the data into the HDF5 backend format first. """ # Test if h5py is available @@ -149,11 +149,11 @@ def process_db_analysis(infiles, show_empty_ids): \b Arguments: - INPUT_FILES File(s) produced by `imaspy analyze-db` to process. + INPUT_FILES File(s) produced by `imas analyze-db` to process. """ setup_rich_log_handler(False) - factory = imaspy.IDSFactory() + factory = imas.IDSFactory() filled_per_ids = {ids_name: set() for ids_name in factory.ids_names()} logger.info("Using Data Dictionary version %s.", factory.dd_version) logger.info("Reading %d input files...", len(infiles)) diff --git a/imaspy/command/helpers.py b/imas/command/helpers.py similarity index 69% rename from imaspy/command/helpers.py rename to imas/command/helpers.py index d3009ce4..f43a47e1 100644 --- a/imaspy/command/helpers.py +++ b/imas/command/helpers.py @@ -5,28 +5,28 @@ from packaging.version import Version from rich.logging import RichHandler -from imaspy.backends.imas_core.imas_interface import ll_interface +from imas.backends.imas_core.imas_interface import ll_interface def setup_rich_log_handler(quiet: bool): """Setup rich.logging.RichHandler on the root logger. Args: - quiet: When True: set log level of the `imaspy` logger to WARNING or higher. + quiet: When True: set log level of the `imas` logger to WARNING or higher. 
""" - # Disable default imaspy log handler - imaspy_logger = logging.getLogger("imaspy") - for handler in imaspy_logger.handlers: - imaspy_logger.removeHandler(handler) + # Disable default imas log handler + imas_logger = logging.getLogger("imas") + for handler in imas_logger.handlers: + imas_logger.removeHandler(handler) # Disable any root log handlers root_logger = logging.getLogger() for handler in root_logger.handlers: root_logger.removeHandler(handler) # Install rich handler on the root logger: root_logger.addHandler(RichHandler()) - if quiet: # Silence IMASPy INFO messages + if quiet: # Silence imas-python INFO messages # If loglevel is less than WARNING, set it to WARNING: - imaspy_logger.setLevel(max(logging.WARNING, imaspy_logger.getEffectiveLevel())) + imas_logger.setLevel(max(logging.WARNING, imas_logger.getEffectiveLevel())) def min_version_guard(al_version: Version): diff --git a/imaspy/command/timer.py b/imas/command/timer.py similarity index 95% rename from imaspy/command/timer.py rename to imas/command/timer.py index be9b21f7..9f43ee55 100644 --- a/imaspy/command/timer.py +++ b/imas/command/timer.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Utility class to time different sections of a CLI app.""" import time diff --git a/imaspy/db_entry.py b/imas/db_entry.py similarity index 91% rename from imaspy/db_entry.py rename to imas/db_entry.py index 3834655d..899dfd83 100644 --- a/imaspy/db_entry.py +++ b/imas/db_entry.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interacting with IMAS Data Entries. 
""" @@ -9,17 +9,17 @@ import numpy -import imaspy -from imaspy.backends.db_entry_impl import ( +import imas +from imas.backends.db_entry_impl import ( DBEntryImpl, GetSampleParameters, GetSliceParameters, ) -from imaspy.dd_zip import dd_xml_versions -from imaspy.exception import IDSNameError, UnknownDDVersion, ValidationError -from imaspy.ids_base import IDSBase -from imaspy.ids_convert import dd_version_map_from_factories -from imaspy.ids_defs import ( +from imas.dd_zip import dd_xml_versions +from imas.exception import IDSNameError, UnknownDDVersion, ValidationError +from imas.ids_base import IDSBase +from imas.ids_convert import dd_version_map_from_factories +from imas.ids_defs import ( CREATE_PULSE, FORCE_CREATE_PULSE, FORCE_OPEN_PULSE, @@ -27,9 +27,9 @@ IDS_TIME_MODES, OPEN_PULSE, ) -from imaspy.ids_factory import IDSFactory -from imaspy.ids_metadata import IDSType -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_factory import IDSFactory +from imas.ids_metadata import IDSType +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -53,17 +53,17 @@ class DBEntry: .. code-block:: python - import imaspy + import imas - # AL4-style constructor: - with imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "test", 1, 12) as dbentry: + # old constructor: + with imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "test", 1, 12) as dbentry: # dbentry is now opened and can be used for reading data: ids = dbentry.get(...) 
# The dbentry is now closed - # AL5-style constructor also allows creating the Data Entry with the mode + # new constructor also allows creating the Data Entry with the mode # argument - with imaspy.DBEntry("imas:hdf5?path=testdb", "w") as dbentry: + with imas.DBEntry("imas:hdf5?path=testdb", "w") as dbentry: # dbentry is now created and can be used for writing data: dbentry.put(ids) # The dbentry is now closed @@ -190,9 +190,9 @@ def __init__( def _select_implementation(uri: Optional[str]) -> Type[DBEntryImpl]: """Select which DBEntry implementation to use based on the URI.""" if uri and uri.endswith(".nc") and not uri.startswith("imas:"): - from imaspy.backends.netcdf.db_entry_nc import NCDBEntryImpl as impl + from imas.backends.netcdf.db_entry_nc import NCDBEntryImpl as impl else: - from imaspy.backends.imas_core.db_entry_al import ALDBEntryImpl as impl + from imas.backends.imas_core.db_entry_al import ALDBEntryImpl as impl return impl def __enter__(self): @@ -243,10 +243,10 @@ def create(self, *, options=None, force=True) -> None: Example: .. code-block:: python - import imaspy - from imaspy.ids_defs import HDF5_BACKEND + import imas + from imas.ids_defs import HDF5_BACKEND - imas_entry = imaspy.DBEntry(HDF5_BACKEND, "test", 1, 1234) + imas_entry = imas.DBEntry(HDF5_BACKEND, "test", 1, 1234) imas_entry.create() """ self._open_pulse(FORCE_CREATE_PULSE if force else CREATE_PULSE, options) @@ -263,10 +263,10 @@ def open(self, mode=OPEN_PULSE, *, options=None, force=False) -> None: Example: .. code-block:: python - import imaspy - from imaspy.ids_defs import HDF5_BACKEND + import imas + from imas.ids_defs import HDF5_BACKEND - imas_entry = imaspy.DBEntry(HDF5_BACKEND, "test", 1, 1234) + imas_entry = imas.DBEntry(HDF5_BACKEND, "test", 1, 1234) imas_entry.open() """ if force: @@ -345,9 +345,9 @@ def get( Example: .. 
code-block:: python - import imaspy + import imas - imas_entry = imaspy.DBEntry(imaspy.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") + imas_entry = imas.DBEntry(imas.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") imas_entry.open() core_profiles = imas_entry.get("core_profiles") """ # noqa @@ -384,9 +384,9 @@ def get_slice( time_requested: Requested time slice interpolation_method: Interpolation method to use. Available options: - - :const:`~imaspy.ids_defs.CLOSEST_INTERP` - - :const:`~imaspy.ids_defs.PREVIOUS_INTERP` - - :const:`~imaspy.ids_defs.LINEAR_INTERP` + - :const:`~imas.ids_defs.CLOSEST_INTERP` + - :const:`~imas.ids_defs.PREVIOUS_INTERP` + - :const:`~imas.ids_defs.LINEAR_INTERP` occurrence: Which occurrence of the IDS to read. @@ -414,11 +414,11 @@ def get_slice( Example: .. code-block:: python - import imaspy + import imas - imas_entry = imaspy.DBEntry(imaspy.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") + imas_entry = imas.DBEntry(imas.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") imas_entry.open() - core_profiles = imas_entry.get_slice("core_profiles", 370, imaspy.ids_defs.PREVIOUS_INTERP) + core_profiles = imas_entry.get_slice("core_profiles", 370, imas.ids_defs.PREVIOUS_INTERP) """ # noqa return self._get( ids_name, @@ -483,9 +483,9 @@ def get_sample( containing an explicit time base to interpolate. interpolation_method: Interpolation method to use. Available options: - - :const:`~imaspy.ids_defs.CLOSEST_INTERP` - - :const:`~imaspy.ids_defs.PREVIOUS_INTERP` - - :const:`~imaspy.ids_defs.LINEAR_INTERP` + - :const:`~imas.ids_defs.CLOSEST_INTERP` + - :const:`~imas.ids_defs.PREVIOUS_INTERP` + - :const:`~imas.ids_defs.LINEAR_INTERP` occurrence: Which occurrence of the IDS to read. @@ -513,11 +513,11 @@ def get_sample( Example: .. 
code-block:: python - import imaspy + import imas import numpy - from imaspy import ids_defs + from imas import ids_defs - imas_entry = imaspy.DBEntry( + imas_entry = imas.DBEntry( "imas:mdsplus?user=public;pulse=131024;run=41;database=ITER", "r") # All time slices between t=200 and t=370 @@ -605,12 +605,13 @@ def _get( if dd_version.split(".")[0] != destination._dd_version.split(".")[0]: logger.warning( "On-disk data is stored in DD %s which has a different major " - "version than the requested DD version (%s). IMASPy will convert " - "the data automatically, but this does not cover all changes. See " - "%s/multi-dd.html#conversion-of-idss-between-dd-versions", + "version than the requested DD version (%s). imas-python will " + "convert the data automatically, but this does not cover all" + "changes. " + "See %s/multi-dd.html#conversion-of-idss-between-dd-versions", dd_version, destination._dd_version, - imaspy.PUBLISHED_DOCUMENTATION_ROOT, + imas.PUBLISHED_DOCUMENTATION_ROOT, ) ddmap, source_is_older = dd_version_map_from_factories( ids_name, IDSFactory(version=dd_version), self._ids_factory @@ -643,7 +644,7 @@ def put(self, ids: IDSToplevel, occurrence: int = 0) -> None: Example: .. code-block:: python - ids = imaspy.IDSFactory().pf_active() + ids = imas.IDSFactory().pf_active() ... # fill the pf_active IDS here imas_entry.put(ids) """ @@ -682,7 +683,7 @@ def put_slice(self, ids: IDSToplevel, occurrence: int = 0) -> None: .. code-block:: python - ids = imaspy.IDSFactory().pf_active() ... # fill the static data of the + ids = imas.IDSFactory().pf_active() ... # fill the static data of the pf_active IDS here for i in range(N): ... 
# fill time slice of the pf_active IDS imas_entry.put_slice(ids) """ @@ -733,7 +734,7 @@ def _put(self, ids: IDSToplevel, occurrence: int, is_slice: bool): version_put = ids.ids_properties.version_put version_put.data_dictionary = self._ids_factory._version version_put.access_layer = self._dbe_impl.access_layer_version() - version_put.access_layer_language = f"imaspy {imaspy.__version__}" + version_put.access_layer_language = f"imas {imas.__version__}" self._dbe_impl.put(ids, occurrence, is_slice) diff --git a/imas/dd_helpers.py b/imas/dd_helpers.py new file mode 100644 index 00000000..f5fd5070 --- /dev/null +++ b/imas/dd_helpers.py @@ -0,0 +1,168 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Helper functions to build IDSDef.xml""" + +import logging +import os +import shutil +from pathlib import Path +from typing import Tuple +from zipfile import ZIP_DEFLATED, ZipFile + +from packaging.version import Version as V +from saxonche import PySaxonProcessor + +logger = logging.getLogger(__name__) + +_idsdef_zip_relpath = Path("imas/assets/IDSDef.zip") +_build_dir = Path("build") + + +def prepare_data_dictionaries(): + """Build IMAS IDSDef.xml files for each tagged version in the DD repository + 1. Use saxonche for transformations + 2. Clone the DD repository (ask for user/pass unless ssh key access is available) + 3. Generate IDSDef.xml and rename to IDSDef_${version}.xml + 4. 
Zip all these IDSDefs together and include in wheel + """ + from git import Repo + + repo: Repo = get_data_dictionary_repo() + if repo: + newest_version_and_tag = (V("0"), None) + for tag in repo.tags: + version_and_tag = (V(str(tag)), tag) + if V(str(tag)) > V("3.21.1"): + newest_version_and_tag = max(newest_version_and_tag, version_and_tag) + logger.debug("Building data dictionary version %s", tag) + build_data_dictionary(repo, tag) + + logger.info("Creating zip file of DD versions") + + if _idsdef_zip_relpath.is_file(): + logger.warning("Overwriting '%s'", _idsdef_zip_relpath) + + with ZipFile( + _idsdef_zip_relpath, + mode="w", # this needs w, since zip can have multiple same entries + compression=ZIP_DEFLATED, + ) as dd_zip: + for filename in _build_dir.glob("[0-9]*.xml"): + arcname = Path("data-dictionary").joinpath(*filename.parts[1:]) + dd_zip.write(filename, arcname=arcname) + # Include identifiers from latest tag in zip file + repo.git.checkout(newest_version_and_tag[1], force=True) + # DD layout <= 4.0.0 + for filename in Path("data-dictionary").glob("*/*identifier.xml"): + arcname = Path("identifiers").joinpath(*filename.parts[1:]) + dd_zip.write(filename, arcname=arcname) + # DD layout > 4.0.0 + for filename in Path("data-dictionary").glob("schemas/*/*identifier.xml"): + arcname = Path("identifiers").joinpath(*filename.parts[2:]) + dd_zip.write(filename, arcname=arcname) + + +def get_data_dictionary_repo() -> Tuple[bool, bool]: + try: + import git # Import git here, the user might not have it! + except ModuleNotFoundError: + raise RuntimeError( + "Could not find 'git' module, try 'pip install gitpython'. \ + Will not build Data Dictionaries!" 
+ ) + + # We need the actual source code (for now) so grab it from ITER + dd_repo_path = "data-dictionary" + + if "DD_DIRECTORY" in os.environ: + logger.info("Found DD_DIRECTORY, copying") + try: + shutil.copytree(os.environ["DD_DIRECTORY"], dd_repo_path) + except FileExistsError: + pass + else: + logger.info("Trying to pull data dictionary git repo from ITER") + + # Set up a bare repo and fetch the data-dictionary repository in it + os.makedirs(dd_repo_path, exist_ok=True) + try: + repo = git.Repo(dd_repo_path) + except git.exc.InvalidGitRepositoryError: + repo = git.Repo.init(dd_repo_path) + logger.info("Set up local git repository {!s}".format(repo)) + + try: + origin = repo.remote() + except ValueError: + dd_repo_url = "https://github.com/iterorganization/imas-data-dictionary.git" + origin = repo.create_remote("origin", url=dd_repo_url) + logger.info("Set up remote '{!s}' linking to '{!s}'".format(origin, origin.url)) + + try: + origin.fetch(tags=True) + except git.exc.GitCommandError as ee: + logger.warning( + "Could not fetch tags from %s. Git reports:\n %s." "\nTrying to continue", + list(origin.urls), + ee, + ) + else: + logger.info("Remote tags fetched") + return repo + + +def _run_xsl_transformation( + xsd_file: Path, xsl_file: Path, tag: str, output_file: Path +) -> None: + """ + This function performs an XSL transformation using Saxon-HE (saxonche) + with the provided XSD file, XSL file, tag, and output file. 
+ + Args: + xsd_file (Path): XML Schema Definition (XSD) file + xsl_file (Path): The `xsl_file` parameter + tag (str): tag name to provide to 'DD_GIT_DESCRIBE' parameter + output_file (Path): The `output_file` parameter for resulting xml + """ + with PySaxonProcessor(license=False) as proc: + logger.debug("Initializing Saxon Processor") + xsltproc = proc.new_xslt30_processor() + xdm_ddgit = proc.make_string_value(tag) + xsltproc.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit) + xsltproc.transform_to_file( + source_file=str(xsd_file), + stylesheet_file=str(xsl_file), + output_file=str(output_file), + ) + logger.info("Transformation complete: %s -> %s", xsd_file, output_file) + + +def build_data_dictionary(repo, tag: str, rebuild=False) -> None: + """Build a single version of the data dictionary given by the tag argument + if the IDS does not already exist. + + In the data-dictionary repository sometimes IDSDef.xml is stored + directly, in which case we do not call make. + + Args: + repo: Repository object containing the DD source code + tag: The DD version tag that will be build + rebuild: If true, overwrites existing pre-build tagged DD version + """ + _build_dir.mkdir(exist_ok=True) + result_xml = _build_dir / f"{tag}.xml" + + if result_xml.exists() and not rebuild: + logger.debug(f"XML for tag '{tag}' already exists, skipping") + return + + repo.git.checkout(tag, force=True) + + # Perform the XSL transformation with saxonche + dd_xsd = Path("data-dictionary/dd_data_dictionary.xml.xsd") + dd_xsl = Path("data-dictionary/dd_data_dictionary.xml.xsl") + _run_xsl_transformation(dd_xsd, dd_xsl, tag.name, result_xml) + + +if __name__ == "__main__": + prepare_data_dictionaries() diff --git a/imaspy/dd_zip.py b/imas/dd_zip.py similarity index 89% rename from imaspy/dd_zip.py rename to imas/dd_zip.py index 15354eb6..cd6fa1cd 100644 --- a/imaspy/dd_zip.py +++ b/imas/dd_zip.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. 
-# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """ Extract DD versions from a zip file. The zip file contains files as @@ -7,27 +7,27 @@ * `data-dictionary/3.29.0.xml` multiple paths are checked. See `ZIPFILE_LOCATIONS`. -First the environment variable IMASPY_DDZIP is checked. +First the environment variable IMAS_DDZIP is checked. If that exists and points to a file we will attempt to open it. Then, IDSDef.zip is searched in site-packages, the current folder, -in .config/imaspy/ (`$$XDG_CONFIG_HOME`) and in -the assets/ folder within the IMASPy package. +in .config/imas/ (`$$XDG_CONFIG_HOME`) and in +the assets/ folder within the imas-python package. -1. `$$IMASPY_DDZIP` +1. `$$IMAS_DDZIP` 2. The virtual environment -3. USER_BASE`imaspy/IDSDef.zip` -4. All `site-packages/imaspy/IDSDef.zip` +3. USER_BASE`imas/IDSDef.zip` +4. All `site-packages/imas/IDSDef.zip` 5. `./IDSDef.zip` -6. `~/.config/imaspy/IDSDef.zip` -7. `__file__/../../imaspy/assets/IDSDef.zip` +6. `~/.config/imas/IDSDef.zip` +7. `__file__/../../imas/assets/IDSDef.zip` -All files are checked, i.e. if your .config/imaspy/IDSDef.zip is outdated -the IMASPy-packaged version will be used. +All files are checked, i.e. if your .config/imas/IDSDef.zip is outdated +the imas-python-packaged version will be used. The `assets/IDSDef.zip` provided with the package can be updated with the `python setup.py build_DD` command, which is also performed on install if you have access to the ITER data-dictionary git repo. -Reinstalling imaspy thus also will give you access to the latest DD versions. +Reinstalling imas thus also will give you access to the latest DD versions. 
""" import logging import os @@ -53,8 +53,8 @@ from packaging.version import InvalidVersion, Version -import imaspy -from imaspy.exception import UnknownDDVersion +import imas +from imas.exception import UnknownDDVersion logger = logging.getLogger(__name__) @@ -70,20 +70,20 @@ def _get_xdg_config_dir(): def _generate_zipfile_locations() -> Iterator[Union[Path, Traversable]]: """Build a list of potential data dictionary locations. - We start with the path (if any) of the IMASPY_DDZIP env var. + We start with the path (if any) of the IMAS_DDZIP env var. Then we look for IDSDef.zip in the current folder, in the - default XDG config dir (~/.config/imaspy/IDSDef.zip) and + default XDG config dir (~/.config/imas/IDSDef.zip) and finally in the assets distributed with this package. """ zip_name = "IDSDef.zip" - environ = os.environ.get("IMASPY_DDZIP") + environ = os.environ.get("IMAS_DDZIP") if environ: yield Path(environ).resolve() yield Path(zip_name).resolve() - yield Path(_get_xdg_config_dir()).resolve() / "imaspy" / zip_name - yield files(imaspy) / "assets" / zip_name + yield Path(_get_xdg_config_dir()).resolve() / "imas" / zip_name + yield files(imas) / "assets" / zip_name def parse_dd_version(version: str) -> Version: @@ -281,12 +281,12 @@ def get_identifier_xml(identifier_name): def print_supported_version_warning(version): try: - if parse_dd_version(version) < imaspy.OLDEST_SUPPORTED_VERSION: + if parse_dd_version(version) < imas.OLDEST_SUPPORTED_VERSION: logger.warning( "Version %s is below lowest supported version of %s.\ Proceed at your own risk.", version, - imaspy.OLDEST_SUPPORTED_VERSION, + imas.OLDEST_SUPPORTED_VERSION, ) except InvalidVersion: logging.warning("Ignoring version parsing error.", exc_info=1) diff --git a/imaspy/exception.py b/imas/exception.py similarity index 92% rename from imaspy/exception.py rename to imas/exception.py index 550ce2ed..fa89c326 100644 --- a/imaspy/exception.py +++ b/imas/exception.py @@ -1,16 +1,16 @@ -# This file is part 
of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Exception classes used in IMASPy. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Exception classes used in imas-python. """ import difflib import logging from typing import TYPE_CHECKING, List -from imaspy.backends.imas_core import imas_interface as _imas_interface +from imas.backends.imas_core import imas_interface as _imas_interface if TYPE_CHECKING: - from imaspy.ids_factory import IDSFactory + from imas.ids_factory import IDSFactory logger = logging.getLogger(__name__) diff --git a/imaspy/ids_base.py b/imas/ids_base.py similarity index 90% rename from imaspy/ids_base.py rename to imas/ids_base.py index 1d299c39..3e182772 100644 --- a/imaspy/ids_base.py +++ b/imas/ids_base.py @@ -1,17 +1,17 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Base class for all IDS nodes. """ import logging from typing import TYPE_CHECKING, Optional, Type -from imaspy.exception import ValidationError -from imaspy.ids_defs import IDS_TIME_MODE_INDEPENDENT -from imaspy.ids_metadata import IDSMetadata +from imas.exception import ValidationError +from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT +from imas.ids_metadata import IDSMetadata if TYPE_CHECKING: - from imaspy.ids_toplevel import IDSToplevel + from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -62,7 +62,7 @@ def _path(self) -> str: AoS. Usage of _path is (and should remain) limited to "interactive" use cases - (like in :mod:`imaspy.util` and ``__repr__``) or when reporting errors. + (like in :mod:`imas.util` and ``__repr__``) or when reporting errors. 
Examples: - ``ids.ids_properties.creation_data._path`` is @@ -70,7 +70,7 @@ def _path(self) -> str: - ``gyrokinetics.wavevector[0].radial_component_norm._path`` is ``"wavevector[0]/radial_component_norm"`` """ - from imaspy.ids_struct_array import IDSStructArray + from imas.ids_struct_array import IDSStructArray parent_path = self._parent._path my_path = self.metadata.name @@ -117,7 +117,7 @@ def _validate(self) -> None: """Actual implementation of validation logic. See also: - :py:meth:`imaspy.ids_toplevel.IDSToplevel.validate`. + :py:meth:`imas.ids_toplevel.IDSToplevel.validate`. Args: aos_indices: index_name -> index, e.g. {"i1": 1, "itime": 0}, for all parent diff --git a/imaspy/ids_convert.py b/imas/ids_convert.py similarity index 98% rename from imaspy/ids_convert.py rename to imas/ids_convert.py index f31d6676..95ccd92c 100644 --- a/imaspy/ids_convert.py +++ b/imas/ids_convert.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Functionality for converting IDSToplevels between DD versions. 
""" @@ -14,21 +14,21 @@ import numpy from packaging.version import InvalidVersion, Version -import imaspy -from imaspy.dd_zip import parse_dd_version -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_factory import IDSFactory -from imaspy.ids_path import IDSPath -from imaspy.ids_primitive import ( +import imas +from imas.dd_zip import parse_dd_version +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_factory import IDSFactory +from imas.ids_path import IDSPath +from imas.ids_primitive import ( IDSNumeric0D, IDSNumericArray, IDSPrimitive, IDSString0D, ) -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -183,7 +183,7 @@ def _check_data_type(self, old_item: Element, new_item: Element): else: logger.debug( "Data type of %s changed from %s to %s. This change is not " - "supported by IMASPy: no conversion will be done.", + "supported by imas-python: no conversion will be done.", new_item.get("path"), old_item.get("data_type"), new_item.get("data_type"), @@ -527,7 +527,7 @@ def _add_provenance_entry( source_txt = ( f"{provenance_origin_uri}; " f"This IDS has been converted from DD {source_version} to " - f"DD {target_ids._dd_version} by IMASPy {imaspy.__version__}." + f"DD {target_ids._dd_version} by imas-python {imas.__version__}." ) if hasattr(node, "reference"): # DD version after IMAS-5304 diff --git a/imaspy/ids_coordinates.py b/imas/ids_coordinates.py similarity index 94% rename from imaspy/ids_coordinates.py rename to imas/ids_coordinates.py index 831b3fd3..8e3a2b70 100644 --- a/imaspy/ids_coordinates.py +++ b/imas/ids_coordinates.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. 
-# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interpreting coordinates in an IDS. """ @@ -9,16 +9,16 @@ import numpy as np -from imaspy.exception import CoordinateError, CoordinateLookupError, ValidationError -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import EMPTY_FLOAT -from imaspy.ids_defs import IDS_TIME_MODE_HETEROGENEOUS as HETEROGENEOUS_TIME -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS as HOMOGENEOUS_TIME -from imaspy.ids_path import IDSPath +from imas.exception import CoordinateError, CoordinateLookupError, ValidationError +from imas.ids_data_type import IDSDataType +from imas.ids_defs import EMPTY_FLOAT +from imas.ids_defs import IDS_TIME_MODE_HETEROGENEOUS as HETEROGENEOUS_TIME +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS as HOMOGENEOUS_TIME +from imas.ids_path import IDSPath if TYPE_CHECKING: # Prevent circular imports - from imaspy.ids_base import IDSBase - from imaspy.ids_primitive import IDSPrimitive + from imas.ids_base import IDSBase + from imas.ids_primitive import IDSPrimitive logger = logging.getLogger(__name__) @@ -142,10 +142,10 @@ class IDSCoordinates: Can be used to automatically retrieve coordinate values via the indexing operator. Example: - >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.ids_properties.homogeneous_time = \\ - ... imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + ... imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> core_profiles.profiles_1d.coordinates[0] IDSNumericArray("/core_profiles/time", array([], dtype=float64)) """ @@ -281,7 +281,7 @@ def _validate(self): """Coordinate validation checks. See also: - :py:meth:`imaspy.ids_toplevel.IDSToplevel.validate`. + :py:meth:`imas.ids_toplevel.IDSToplevel.validate`. 
""" node = self._node shape = node.shape @@ -385,7 +385,9 @@ def _capture_goto_errors(self, dim, coordinate): "some coordinate metadata is incorrect." ) else: - version_error = "Please report this issue to the IMASPy developers." + version_error = ( + "Please report this issue to the imas-python developers." + ) logger.warning( "An error occurred while finding coordinate `%s` of dimension %s, " "which is ignored. %s", diff --git a/imaspy/ids_data_type.py b/imas/ids_data_type.py similarity index 95% rename from imaspy/ids_data_type.py rename to imas/ids_data_type.py index 794ccf3a..69a3a201 100644 --- a/imaspy/ids_data_type.py +++ b/imas/ids_data_type.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Data Dictionary type handling functionality. """ @@ -9,7 +9,7 @@ import numpy as np -from imaspy.ids_defs import ( +from imas.ids_defs import ( CHAR_DATA, COMPLEX_DATA, DOUBLE_DATA, diff --git a/imaspy/ids_defs.py b/imas/ids_defs.py similarity index 95% rename from imaspy/ids_defs.py rename to imas/ids_defs.py index b35352cb..26ac10c3 100644 --- a/imaspy/ids_defs.py +++ b/imas/ids_defs.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -""" Load IMASPy libs to provide constants +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +""" Load imas-python libs to provide constants .. 
_`Backend identifiers`: @@ -89,7 +89,7 @@ import functools import logging -from imaspy.backends.imas_core.imas_interface import has_imas, imasdef +from imas.backends.imas_core.imas_interface import has_imas, imasdef logger = logging.getLogger(__name__) diff --git a/imaspy/ids_factory.py b/imas/ids_factory.py similarity index 88% rename from imaspy/ids_factory.py rename to imas/ids_factory.py index e1bde19d..2173985b 100644 --- a/imaspy/ids_factory.py +++ b/imas/ids_factory.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Tools for generating IDSs from a Data Dictionary version. """ @@ -7,9 +7,9 @@ from functools import partial from typing import Any, Iterable, Iterator, List, Optional -from imaspy import dd_zip -from imaspy.exception import IDSNameError -from imaspy.ids_toplevel import IDSToplevel +from imas import dd_zip +from imas.exception import IDSNameError +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -21,9 +21,9 @@ class IDSFactory: >>> factory = IDSFactory() >>> factory.core_profiles() - + >>> factory.new("core_profiles") - + """ def __init__( @@ -31,7 +31,7 @@ def __init__( ) -> None: """Create a new IDS Factory - See :meth:`imaspy.dd_zip.dd_etree` for further details on the ``version`` and + See :meth:`imas.dd_zip.dd_etree` for further details on the ``version`` and ``xml_path`` arguments. Args: diff --git a/imaspy/ids_identifiers.py b/imas/ids_identifiers.py similarity index 94% rename from imaspy/ids_identifiers.py rename to imas/ids_identifiers.py index 397975d0..df72be32 100644 --- a/imaspy/ids_identifiers.py +++ b/imas/ids_identifiers.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""IMASPy module to support Data Dictionary identifiers. 
+# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""imas-python module to support Data Dictionary identifiers. """ import logging @@ -8,7 +8,7 @@ from typing import Iterable, List, Type from xml.etree.ElementTree import fromstring -from imaspy import dd_zip +from imas import dd_zip logger = logging.getLogger(__name__) @@ -107,7 +107,7 @@ def identifiers(self) -> List[str]: Example: .. code-block:: python - from imaspy import identifiers + from imas import identifiers # List all identifier names for identifier_name in identifiers.identifiers: print(identifier_name) diff --git a/imaspy/ids_metadata.py b/imas/ids_metadata.py similarity index 92% rename from imaspy/ids_metadata.py rename to imas/ids_metadata.py index 05172627..2cd0e224 100644 --- a/imaspy/ids_metadata.py +++ b/imas/ids_metadata.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Core of the IMASPy interpreted IDS metadata +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Core of the imas-python interpreted IDS metadata """ import re import types @@ -10,10 +10,10 @@ from typing import Any, Dict, Iterator, Optional, Tuple, Type from xml.etree.ElementTree import Element -from imaspy.ids_coordinates import IDSCoordinate -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_identifiers import IDSIdentifier, identifiers -from imaspy.ids_path import IDSPath +from imas.ids_coordinates import IDSCoordinate +from imas.ids_data_type import IDSDataType +from imas.ids_identifiers import IDSIdentifier, identifiers +from imas.ids_path import IDSPath class IDSType(Enum): @@ -26,7 +26,7 @@ class IDSType(Enum): operation); ``dynamic`` data are those which vary in time within the context of the data. 
- As in the Python HLI, IMASPy only distinguishes between dynamic and non-dynamic + As in the Python HLI, imas-python only distinguishes between dynamic and non-dynamic nodes. """ @@ -86,7 +86,7 @@ def _build_type_map(): This must be done in a separate function to avoid circular imports. """ - from imaspy.ids_primitive import ( + from imas.ids_primitive import ( IDSComplex0D, IDSFloat0D, IDSInt0D, @@ -94,9 +94,9 @@ def _build_type_map(): IDSString0D, IDSString1D, ) - from imaspy.ids_struct_array import IDSStructArray - from imaspy.ids_structure import IDSStructure - from imaspy.ids_toplevel import IDSToplevel + from imas.ids_struct_array import IDSStructArray + from imas.ids_structure import IDSStructure + from imas.ids_toplevel import IDSToplevel _type_map[(None, 0)] = IDSToplevel _type_map[(IDSDataType.STRUCTURE, 0)] = IDSStructure @@ -123,7 +123,7 @@ class IDSMetadata: .. code-block:: python - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # Get the metadata of the time child of the profiles_1d array of structures p1d_time_meta = core_profiles.metadata["profiles_1d/time"] @@ -154,7 +154,8 @@ def __init__( else: self._ctx_path = self.name - # These are special and used in IMASPy logic, so we need to ensure proper values + # These are special and used in imas-python logic, + # so we need to ensure proper values maxoccur = attrib.get("maxoccur", "unbounded") self.maxoccur: Optional[int] = ( None if maxoccur == "unbounded" else int(maxoccur) @@ -291,7 +292,7 @@ def identifier_enum(self) -> Optional[Type[IDSIdentifier]]: """The identifier enum for this IDS node (if available). This property is an identifier enum (a subclass of - :py:class:`imaspy.ids_identifiers.IDSIdentifier`) if this node represents an + :py:class:`imas.ids_identifiers.IDSIdentifier`) if this node represents an identifier, and the Data Dictionary defines the allowed identifier values. 
This property is ``None`` when this node is not an identifier, or the Data diff --git a/imaspy/ids_path.py b/imas/ids_path.py similarity index 97% rename from imaspy/ids_path.py rename to imas/ids_path.py index a0a90dbc..75fb6694 100644 --- a/imaspy/ids_path.py +++ b/imas/ids_path.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interpreting paths to elements in an IDS """ @@ -8,8 +8,8 @@ from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Tuple, Union if TYPE_CHECKING: # Prevent circular imports - from imaspy.ids_base import IDSBase - from imaspy.ids_metadata import IDSMetadata + from imas.ids_base import IDSBase + from imas.ids_metadata import IDSMetadata logger = logging.getLogger(__name__) @@ -181,7 +181,7 @@ def goto(self, from_element: "IDSBase", *, from_root: bool = True) -> "IDSBase": Example: .. code-block:: python - cp = imaspy.IDSFactory().core_profiles() + cp = imas.IDSFactory().core_profiles() cp.profiles_1d.resize(1) element = cp.profiles_1d[0] path1 = IDSPath("ids_properties/homogeneous_time") @@ -236,7 +236,7 @@ def goto_metadata(self, from_metadata: "IDSMetadata") -> "IDSMetadata": Example: .. code-block:: python - es = imaspy.IDSFactory().edge_sources() + es = imas.IDSFactory().edge_sources() path = IDSPath("source/ggd/ion/energy") energy_metadata = path.goto_metadata(es.metadata) """ diff --git a/imaspy/ids_primitive.py b/imas/ids_primitive.py similarity index 97% rename from imaspy/ids_primitive.py rename to imas/ids_primitive.py index 94f865b6..816ebac6 100644 --- a/imaspy/ids_primitive.py +++ b/imas/ids_primitive.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. 
+# You should have received the imas-python LICENSE file with this project. """Provides the classes for IDS data nodes """ import logging @@ -13,10 +13,10 @@ import numpy as np from xxhash import xxh3_64, xxh3_64_digest -from imaspy.ids_base import IDSBase, IDSDoc -from imaspy.ids_coordinates import IDSCoordinates -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_metadata import IDSMetadata +from imas.ids_base import IDSBase, IDSDoc +from imas.ids_coordinates import IDSCoordinates +from imas.ids_data_type import IDSDataType +from imas.ids_metadata import IDSMetadata logger = logging.getLogger(__name__) @@ -241,7 +241,7 @@ def _cast_value(self, value): @property def data_type(self): - """Combine imaspy ids_type and ndims to AL data_type""" + """Combine imas ids_type and ndims to AL data_type""" return "{!s}_{!s}D".format(self.metadata.data_type.value, self.metadata.ndim) def _validate(self) -> None: diff --git a/imaspy/ids_struct_array.py b/imas/ids_struct_array.py similarity index 93% rename from imaspy/ids_struct_array.py rename to imas/ids_struct_array.py index bc7dd92c..9f79a130 100644 --- a/imaspy/ids_struct_array.py +++ b/imas/ids_struct_array.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """IDS StructArray represents an Array of Structures in the IDS tree. 
""" @@ -9,11 +9,11 @@ from xxhash import xxh3_64 -from imaspy.backends.imas_core.al_context import LazyALArrayStructContext -from imaspy.ids_base import IDSBase, IDSDoc -from imaspy.ids_coordinates import IDSCoordinates -from imaspy.ids_identifiers import IDSIdentifier -from imaspy.ids_metadata import IDSMetadata +from imas.backends.imas_core.al_context import LazyALArrayStructContext +from imas.ids_base import IDSBase, IDSDoc +from imas.ids_coordinates import IDSCoordinates +from imas.ids_identifiers import IDSIdentifier +from imas.ids_metadata import IDSMetadata logger = logging.getLogger(__name__) @@ -107,7 +107,7 @@ def _load(self, item: Optional[int]) -> None: if item < 0 or item >= len(self): raise IndexError("list index out of range") # Create the requested item - from imaspy.ids_structure import IDSStructure + from imas.ids_structure import IDSStructure element = self.value[item] = IDSStructure(self, self.metadata) element._set_lazy_context(self._lazy_ctx.iterate_to_index(item)) @@ -115,7 +115,7 @@ def _load(self, item: Optional[int]) -> None: @property def _element_structure(self): """Prepare an element structure JIT""" - from imaspy.ids_structure import IDSStructure + from imas.ids_structure import IDSStructure struct = IDSStructure(self, self.metadata) return struct @@ -194,7 +194,7 @@ def resize(self, nbelt: int, keep: bool = False): cur = len(self.value) if nbelt > cur: # Create new structures to fill this AoS with - from imaspy.ids_structure import IDSStructure + from imas.ids_structure import IDSStructure new_els = [IDSStructure(self, self.metadata) for _ in range(nbelt - cur)] if cur: diff --git a/imaspy/ids_structure.py b/imas/ids_structure.py similarity index 91% rename from imaspy/ids_structure.py rename to imas/ids_structure.py index dd4d42ff..f55755fc 100644 --- a/imaspy/ids_structure.py +++ b/imas/ids_structure.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. 
+# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """A structure in an IDS """ @@ -10,13 +10,13 @@ from xxhash import xxh3_64 -from imaspy.backends.imas_core.al_context import LazyALContext -from imaspy.ids_base import IDSBase, IDSDoc -from imaspy.ids_identifiers import IDSIdentifier -from imaspy.ids_metadata import IDSDataType, IDSMetadata -from imaspy.ids_path import IDSPath -from imaspy.ids_primitive import IDSPrimitive -from imaspy.ids_struct_array import IDSStructArray +from imas.backends.imas_core.al_context import LazyALContext +from imas.ids_base import IDSBase, IDSDoc +from imas.ids_identifiers import IDSIdentifier +from imas.ids_metadata import IDSDataType, IDSMetadata +from imas.ids_path import IDSPath +from imas.ids_primitive import IDSPrimitive +from imas.ids_struct_array import IDSStructArray logger = logging.getLogger(__name__) @@ -63,7 +63,7 @@ def __getattr__(self, name): child = child_meta._node_type(self, child_meta) self.__dict__[name] = child # bypass setattr logic below: avoid recursion if self._lazy: # lazy load the child - from imaspy.backends.imas_core.db_entry_helpers import _get_child + from imas.backends.imas_core.db_entry_helpers import _get_child _get_child(child, self._lazy_context) return child @@ -98,7 +98,7 @@ def _assign_identifier(self, value: Union[IDSIdentifier, str, int]) -> None: def __setattr__(self, key, value): """ - 'Smart' setting of attributes. To be able to warn the user on imaspy + 'Smart' setting of attributes. To be able to warn the user on imas IDS interaction time, instead of on database put time Only try to cast user-facing attributes, as core developers might want to always bypass this mechanism (I know I do!) 
@@ -162,7 +162,7 @@ def __eq__(self, other) -> bool: return True if not isinstance(other, IDSStructure): return False if isinstance(other, IDSBase) else NotImplemented - from imaspy.util import idsdiffgen # local import to avoid circular import + from imas.util import idsdiffgen # local import to avoid circular import for _ in idsdiffgen(self, other): return False # Not equal if there is any difference @@ -211,8 +211,8 @@ def iter_nonempty_(self, *, accept_lazy=False) -> Generator[IDSBase, None, None] .. code-block:: python :caption: ``iter_nonempty_`` for fully loaded IDSs - >>> import imaspy.training - >>> entry = imaspy.training.get_training_db_entry() + >>> import imas.training + >>> entry = imas.training.get_training_db_entry() >>> cp = entry.get("core_profiles") >>> list(cp.iter_nonempty_()) [ @@ -225,8 +225,8 @@ def iter_nonempty_(self, *, accept_lazy=False) -> Generator[IDSBase, None, None] .. code-block:: python :caption: ``iter_nonempty_`` for lazy-loaded IDSs - >>> import imaspy.training - >>> entry = imaspy.training.get_training_db_entry() + >>> import imas.training + >>> entry = imas.training.get_training_db_entry() >>> cp = entry.get("core_profiles", lazy=True) >>> list(cp.iter_nonempty_()) RuntimeError: Iterating over non-empty nodes of a lazy loaded IDS will @@ -252,9 +252,9 @@ def iter_nonempty_(self, *, accept_lazy=False) -> Generator[IDSBase, None, None] "Iterating over non-empty nodes of a lazy loaded IDS will skip nodes " "that are not loaded. Set accept_lazy=True to continue. " "See the documentation for more information: " - "https://sharepoint.iter.org/departments/POP/CM/IMDesign/" - "Code%20Documentation/IMASPy-doc/generated/imaspy.ids_structure." - "IDSStructure.html#imaspy.ids_structure.IDSStructure.iter_nonempty_" + "https://imas-python.readthedocs.io/en/latest" + "/generated/imas.ids_structure." 
+ "IDSStructure.html#imas.ids_structure.IDSStructure.iter_nonempty_" ) for child in self._children: if child in self.__dict__: diff --git a/imaspy/ids_toplevel.py b/imas/ids_toplevel.py similarity index 87% rename from imaspy/ids_toplevel.py rename to imas/ids_toplevel.py index faeb6366..d0fa8e0b 100644 --- a/imaspy/ids_toplevel.py +++ b/imas/ids_toplevel.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Represents a Top-level IDS (like ``core_profiles``, ``equilibrium``, etc) """ @@ -11,11 +11,11 @@ import numpy -import imaspy -from imaspy.backends.imas_core.imas_interface import ll_interface, lowlevel -from imaspy.exception import ValidationError -from imaspy.ids_base import IDSDoc -from imaspy.ids_defs import ( +import imas +from imas.backends.imas_core.imas_interface import ll_interface, lowlevel +from imas.exception import ValidationError +from imas.ids_base import IDSDoc +from imas.ids_defs import ( ASCII_BACKEND, ASCII_SERIALIZER_PROTOCOL, CHAR_DATA, @@ -26,12 +26,12 @@ IDS_TIME_MODES, needs_imas, ) -from imaspy.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata -from imaspy.ids_structure import IDSStructure +from imas.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata +from imas.ids_structure import IDSStructure if TYPE_CHECKING: - from imaspy.db_entry import DBEntry - from imaspy.ids_factory import IDSFactory + from imas.db_entry import DBEntry + from imas.ids_factory import IDSFactory _FLEXBUFFERS_URI = "imas:flexbuffers?path=/" @@ -48,14 +48,14 @@ def _serializer_tmpdir() -> str: def _create_serialization_dbentry(filepath: str, dd_version: str) -> "DBEntry": """Create a temporary DBEntry for use in the ASCII serialization protocol.""" if ll_interface._al_version.major == 4: # AL4 compatibility - dbentry = imaspy.DBEntry( + dbentry = 
imas.DBEntry( ASCII_BACKEND, "serialize", 1, 1, "serialize", dd_version=dd_version ) dbentry.create(options=f"-fullpath {filepath}") return dbentry else: # AL5 path = Path(filepath) - return imaspy.DBEntry( + return imas.DBEntry( f"imas:ascii?path={path.parent};filename={path.name}", "w", dd_version=dd_version, @@ -119,7 +119,7 @@ def serialize(self, protocol=None) -> bytes: .. code-block: python - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # fill core_profiles with data ... @@ -128,7 +128,7 @@ def serialize(self, protocol=None) -> bytes: # For example, send `data` to another program with libmuscle. # Then deserialize on the receiving side: - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() core_profiles.deserialize(data) # Use core_profiles: ... @@ -137,9 +137,9 @@ def serialize(self, protocol=None) -> bytes: protocol: Which serialization protocol to use. Uses ``DEFAULT_SERIALIZER_PROTOCOL`` when none specified. One of: - - :const:`~imaspy.ids_defs.ASCII_SERIALIZER_PROTOCOL` - - :const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` - - :const:`~imaspy.ids_defs.DEFAULT_SERIALIZER_PROTOCOL` + - :const:`~imas.ids_defs.ASCII_SERIALIZER_PROTOCOL` + - :const:`~imas.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` + - :const:`~imas.ids_defs.DEFAULT_SERIALIZER_PROTOCOL` The flexbuffers serializer protocol is only available when using ``imas_core >= 5.3``. It's the default protocol when it is available. 
@@ -169,7 +169,7 @@ def serialize(self, protocol=None) -> bytes: if protocol == FLEXBUFFERS_SERIALIZER_PROTOCOL: # Note: FLEXBUFFERS_SERIALIZER_PROTOCOL is None when imas_core doesn't # support this format - with imaspy.DBEntry(_FLEXBUFFERS_URI, "w", dd_version=dd_version) as entry: + with imas.DBEntry(_FLEXBUFFERS_URI, "w", dd_version=dd_version) as entry: entry.put(self) # Read serialized buffer status, buffer = lowlevel.al_read_data_array( @@ -207,7 +207,7 @@ def deserialize(self, data: bytes) -> None: if os.path.exists(filepath): os.unlink(filepath) elif protocol == FLEXBUFFERS_SERIALIZER_PROTOCOL: - with imaspy.DBEntry(_FLEXBUFFERS_URI, "r", dd_version=dd_version) as entry: + with imas.DBEntry(_FLEXBUFFERS_URI, "r", dd_version=dd_version) as entry: # Write serialized buffer to the flexbuffers backend buffer = numpy.frombuffer(data, dtype=numpy.int8) lowlevel._al_write_data_array( @@ -253,16 +253,16 @@ def validate(self): Example: - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.validate() # Did not set homogeneous_time [...] - imaspy.exception.ValidationError: Invalid value for ids_properties/homogeneous_time: IDSPrimitive("/core_profiles/ids_properties/homogeneous_time", -999999999) - >>> core_profiles.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + imas.exception.ValidationError: Invalid value for ids_properties/homogeneous_time: IDSPrimitive("/core_profiles/ids_properties/homogeneous_time", -999999999) + >>> core_profiles.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> core_profiles.validate() # No error: IDS is valid >>> core_profiles.profiles_1d.resize(1) >>> core_profiles.validate() [...] - imaspy.exception.CoordinateError: Dimension 1 of element profiles_1d has incorrect size 1. Expected size is 0 (size of coordinate time). + imas.exception.CoordinateError: Dimension 1 of element profiles_1d has incorrect size 1. 
Expected size is 0 (size of coordinate time). >>> core_profiles.time = [1] >>> core_profiles.validate() # No error: IDS is valid @@ -303,7 +303,7 @@ def get(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None """Get data from AL backend storage format. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.get `. + See :py:meth:`DBEntry.get `. """ if db_entry is None: raise NotImplementedError() @@ -320,7 +320,7 @@ def getSlice( """Get a slice from the backend. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.get_slice `. + See :py:meth:`DBEntry.get_slice `. """ if db_entry is None: raise NotImplementedError() @@ -339,7 +339,7 @@ def putSlice( """Put a single slice into the backend. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.put_slice `. + See :py:meth:`DBEntry.put_slice `. """ if db_entry is None: raise NotImplementedError() @@ -352,7 +352,7 @@ def deleteData( """Delete AL backend storage data. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.delete_data `. + See :py:meth:`DBEntry.delete_data `. """ if db_entry is None: raise NotImplementedError() @@ -363,7 +363,7 @@ def put(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None """Put this IDS to the backend. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.put `. + See :py:meth:`DBEntry.put `. """ if db_entry is None: raise NotImplementedError() diff --git a/imaspy/setup_logging.py b/imas/setup_logging.py similarity index 83% rename from imaspy/setup_logging.py rename to imas/setup_logging.py index 94d49058..e7cfcd5f 100644 --- a/imaspy/setup_logging.py +++ b/imas/setup_logging.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Create a default log handler when IMASPy is imported. 
+# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Create a default log handler when imas-python is imported. """ import logging @@ -37,7 +37,7 @@ def format(self, record): def test_messages(): """Print out a message on each logging level""" - logger = logging.getLogger("imaspy.testlogger") + logger = logging.getLogger("imas.testlogger") logger.debug("Debug message") logger.info("Info message") logger.warning("Warning message") @@ -54,10 +54,10 @@ def connect_formatter(logger): # Log to console by default, and output it all -logger = logging.getLogger("imaspy") +logger = logging.getLogger("imas") connect_formatter(logger) -loglevel = os.getenv("IMASPY_LOGLEVEL") or "INFO" +loglevel = os.getenv("IMAS_LOGLEVEL") or "INFO" logger.setLevel(loglevel) if __name__ == "__main__": diff --git a/imaspy/test/test_all_dd_versions.py b/imas/test/test_all_dd_versions.py similarity index 89% rename from imaspy/test/test_all_dd_versions.py rename to imas/test/test_all_dd_versions.py index 25514eac..c30a7bca 100644 --- a/imaspy/test/test_all_dd_versions.py +++ b/imas/test/test_all_dd_versions.py @@ -1,7 +1,7 @@ import pytest -from imaspy import dd_zip, ids_metadata -from imaspy.ids_factory import IDSFactory +from imas import dd_zip, ids_metadata +from imas.ids_factory import IDSFactory @pytest.fixture diff --git a/imaspy/test/test_cli.py b/imas/test/test_cli.py similarity index 77% rename from imaspy/test/test_cli.py rename to imas/test/test_cli.py index 604a7f7e..8ee95e27 100644 --- a/imaspy/test/test_cli.py +++ b/imas/test/test_cli.py @@ -4,15 +4,15 @@ from click.testing import CliRunner from packaging.version import Version -from imaspy.backends.imas_core.imas_interface import ll_interface -from imaspy.command.cli import print_version -from imaspy.command.db_analysis import analyze_db, process_db_analysis -from imaspy.db_entry import DBEntry -from imaspy.test.test_helpers import fill_with_random_data +from 
imas.backends.imas_core.imas_interface import ll_interface +from imas.command.cli import print_version +from imas.command.db_analysis import analyze_db, process_db_analysis +from imas.db_entry import DBEntry +from imas.test.test_helpers import fill_with_random_data @pytest.mark.cli -def test_imaspy_version(): +def test_imas_version(): runner = CliRunner() result = runner.invoke(print_version) assert result.exit_code == 0 @@ -33,7 +33,7 @@ def test_db_analysis(tmp_path): analyze_result = runner.invoke(analyze_db, [str(db_path)]) assert analyze_result.exit_code == 0, analyze_result.output - outfile = Path(td) / "imaspy-db-analysis.json.gz" + outfile = Path(td) / "imas-db-analysis.json.gz" assert outfile.exists() # Show detailed output for core_profiles, and then an empty input to exit cleanly: diff --git a/imaspy/test/test_dbentry.py b/imas/test/test_dbentry.py similarity index 66% rename from imaspy/test/test_dbentry.py rename to imas/test/test_dbentry.py index 2d82af36..9ba43e00 100644 --- a/imaspy/test/test_dbentry.py +++ b/imas/test/test_dbentry.py @@ -1,21 +1,21 @@ import pytest -import imaspy -import imaspy.ids_defs -from imaspy.backends.imas_core.imas_interface import has_imas, ll_interface -from imaspy.exception import UnknownDDVersion -from imaspy.test.test_helpers import compare_children, open_dbentry +import imas +import imas.ids_defs +from imas.backends.imas_core.imas_interface import has_imas, ll_interface +from imas.exception import UnknownDDVersion +from imas.test.test_helpers import compare_children, open_dbentry def test_dbentry_contextmanager(requires_imas): - entry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "test", 1, 1) + entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) entry.create() ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 ids.ids_properties.comment = "test context manager" entry.put(ids) - with imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "test", 1, 1) as entry2: + with 
imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) as entry2: ids2 = entry2.get("core_profiles") assert ids2.ids_properties.comment == ids.ids_properties.comment @@ -28,13 +28,13 @@ def test_dbentry_contextmanager(requires_imas): reason="URI API not available", ) def test_dbentry_contextmanager_uri(tmp_path): - entry = imaspy.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") + entry = imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 ids.ids_properties.comment = "test context manager" entry.put(ids) - with imaspy.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "r") as entry2: + with imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "r") as entry2: ids2 = entry2.get("core_profiles") assert ids2.ids_properties.comment == ids.ids_properties.comment @@ -42,7 +42,7 @@ def test_dbentry_contextmanager_uri(tmp_path): assert entry2._dbe_impl is None -def get_entry_attrs(entry: imaspy.DBEntry): +def get_entry_attrs(entry: imas.DBEntry): return ( entry.backend_id, entry.db_name, @@ -55,35 +55,35 @@ def get_entry_attrs(entry: imaspy.DBEntry): def test_dbentry_constructor(): with pytest.raises(TypeError): - imaspy.DBEntry() # no arguments + imas.DBEntry() # no arguments with pytest.raises(TypeError): - imaspy.DBEntry(1) # not enough arguments + imas.DBEntry(1) # not enough arguments with pytest.raises(TypeError): - imaspy.DBEntry(1, 2, 3) # not enough arguments + imas.DBEntry(1, 2, 3) # not enough arguments with pytest.raises(TypeError): - imaspy.DBEntry(1, 2, 3, 4, 5, 6, 7) # too many arguments + imas.DBEntry(1, 2, 3, 4, 5, 6, 7) # too many arguments with pytest.raises(TypeError): - imaspy.DBEntry("test", uri="test") # Double URI argument + imas.DBEntry("test", uri="test") # Double URI argument with pytest.raises(TypeError): - imaspy.DBEntry(1, 2, 3, 4, shot=5) # Multiple values for argument pulse + imas.DBEntry(1, 2, 3, 4, shot=5) # Multiple values for argument pulse with 
pytest.raises(ValueError): - imaspy.DBEntry(1, 2, pulse=3, run=4, shot=5) # Both shot and pulse + imas.DBEntry(1, 2, pulse=3, run=4, shot=5) # Both shot and pulse - entry = imaspy.DBEntry(1, 2, 3, 4) + entry = imas.DBEntry(1, 2, 3, 4) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) - entry = imaspy.DBEntry(backend_id=1, db_name=2, pulse=3, run=4) + entry = imas.DBEntry(backend_id=1, db_name=2, pulse=3, run=4) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) # Shot behaves as alias of pulse - entry = imaspy.DBEntry(backend_id=1, db_name=2, shot=3, run=4) + entry = imas.DBEntry(backend_id=1, db_name=2, shot=3, run=4) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) - entry = imaspy.DBEntry(1, 2, 3, 4, 5, 6) + entry = imas.DBEntry(1, 2, 3, 4, 5, 6) assert get_entry_attrs(entry) == (1, 2, 3, 4, 5, 6) - entry = imaspy.DBEntry(1, 2, 3, 4, data_version=6) + entry = imas.DBEntry(1, 2, 3, 4, data_version=6) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): - entry = open_dbentry(imaspy.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) + entry = open_dbentry(imas.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 ids.ids_properties.comment = "Test unknown DD version" diff --git a/imas/test/test_dd_helpers.py b/imas/test/test_dd_helpers.py new file mode 100644 index 00000000..bb9d0d11 --- /dev/null +++ b/imas/test/test_dd_helpers.py @@ -0,0 +1,56 @@ +from pathlib import Path +import shutil +import pytest +import os +import zipfile + +from imas.dd_helpers import prepare_data_dictionaries, _idsdef_zip_relpath, _build_dir + +_idsdef_unzipped_relpath = Path("idsdef_unzipped") + + +def test_prepare_data_dictionaries(): + prepare_data_dictionaries() + assert os.path.exists( + _idsdef_zip_relpath + ), f"IDSDef.zip file does not exist at path: {_idsdef_zip_relpath}" + + expected_xml_files = [ + _build_dir / 
"3.40.0.xml", + _build_dir / "3.41.0.xml", + _build_dir / "3.42.0.xml", + _build_dir / "4.0.0.xml", + ] + + for xml_file in expected_xml_files: + assert os.path.exists(xml_file), f"{xml_file} does not exist" + + with zipfile.ZipFile(_idsdef_zip_relpath, "r") as zip_ref: + zip_ref.extractall(_idsdef_unzipped_relpath) + + expected_ids_directories = [ + _idsdef_unzipped_relpath / "data-dictionary" / "3.40.0.xml", + _idsdef_unzipped_relpath / "data-dictionary" / "3.41.0.xml", + _idsdef_unzipped_relpath / "data-dictionary" / "3.42.0.xml", + _idsdef_unzipped_relpath / "data-dictionary" / "4.0.0.xml", + _idsdef_unzipped_relpath + / "identifiers" + / "core_sources" + / "core_source_identifier.xml", + _idsdef_unzipped_relpath + / "identifiers" + / "equilibrium" + / "equilibrium_profiles_2d_identifier.xml", + ] + + for file_path in expected_ids_directories: + assert os.path.exists( + file_path + ), f"Expected_ids_directories {file_path} does not exist" + + if _build_dir.exists(): + shutil.rmtree(_idsdef_unzipped_relpath) + + +if __name__ == "__main__": + pytest.main() diff --git a/imaspy/test/test_dd_zip.py b/imas/test/test_dd_zip.py similarity index 87% rename from imaspy/test/test_dd_zip.py rename to imas/test/test_dd_zip.py index 243256e5..88b5c420 100644 --- a/imaspy/test/test_dd_zip.py +++ b/imas/test/test_dd_zip.py @@ -1,8 +1,8 @@ import pytest from packaging.version import InvalidVersion -from imaspy.dd_zip import get_dd_xml, parse_dd_version -from imaspy.exception import UnknownDDVersion +from imas.dd_zip import get_dd_xml, parse_dd_version +from imas.exception import UnknownDDVersion def test_known_version(): diff --git a/imaspy/test/test_deepcopy.py b/imas/test/test_deepcopy.py similarity index 63% rename from imaspy/test/test_deepcopy.py rename to imas/test/test_deepcopy.py index 5e889d8e..07cd5645 100644 --- a/imaspy/test/test_deepcopy.py +++ b/imas/test/test_deepcopy.py @@ -1,9 +1,9 @@ import copy -import imaspy -from imaspy.ids_struct_array import 
IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.test.test_helpers import compare_children, fill_with_random_data +import imas +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.test.test_helpers import compare_children, fill_with_random_data def validate_parent(node): @@ -14,7 +14,7 @@ def validate_parent(node): def test_deepcopy(): - factory = imaspy.IDSFactory() + factory = imas.IDSFactory() cp = factory.core_profiles() fill_with_random_data(cp) diff --git a/imaspy/test/test_exception.py b/imas/test/test_exception.py similarity index 67% rename from imaspy/test/test_exception.py rename to imas/test/test_exception.py index e27d21ef..37bebfce 100644 --- a/imaspy/test/test_exception.py +++ b/imas/test/test_exception.py @@ -1,12 +1,12 @@ import pytest -import imaspy -from imaspy.backends.imas_core.imas_interface import ll_interface +import imas +from imas.backends.imas_core.imas_interface import ll_interface def test_catch_al_exception(requires_imas): # Do something which lets the lowlevel Cython interface throw an ALException # Ensure we can catch it: - with pytest.raises(imaspy.exception.ALException): + with pytest.raises(imas.exception.ALException): # Try to write an unknown data type (object) ll_interface.write_data(-1, "X", "", object()) diff --git a/imaspy/test/test_get_sample.py b/imas/test/test_get_sample.py similarity index 98% rename from imaspy/test/test_get_sample.py rename to imas/test/test_get_sample.py index 0f5fed3e..c91ad356 100644 --- a/imaspy/test/test_get_sample.py +++ b/imas/test/test_get_sample.py @@ -1,10 +1,10 @@ import numpy as np import pytest -import imaspy -from imaspy.backends.imas_core.imas_interface import lowlevel -from imaspy.exception import DataEntryException -from imaspy.ids_defs import ( +import imas +from imas.backends.imas_core.imas_interface import lowlevel +from imas.exception import DataEntryException +from imas.ids_defs import ( CLOSEST_INTERP, 
HDF5_BACKEND, IDS_TIME_MODE_HETEROGENEOUS, @@ -28,7 +28,7 @@ def test_db_uri(backend, worker_id, tmp_path_factory): tmp_path = tmp_path_factory.mktemp(f"testdb.{worker_id}") backend_str = {HDF5_BACKEND: "hdf5", MDSPLUS_BACKEND: "mdsplus"}[backend] uri = f"imas:{backend_str}?path={tmp_path}" - entry = imaspy.DBEntry(uri, "x", dd_version="4.0.0") + entry = imas.DBEntry(uri, "x", dd_version="4.0.0") # Homogeneous core profiles: cp = entry.factory.core_profiles() @@ -102,7 +102,7 @@ def test_db_uri(backend, worker_id, tmp_path_factory): @pytest.fixture() def entry(test_db_uri): - return imaspy.DBEntry(test_db_uri, "r", dd_version="4.0.0") + return imas.DBEntry(test_db_uri, "r", dd_version="4.0.0") def test_invalid_arguments(entry): diff --git a/imaspy/test/test_hash.py b/imas/test/test_hash.py similarity index 69% rename from imaspy/test/test_hash.py rename to imas/test/test_hash.py index 1c82ecd3..903135ff 100644 --- a/imaspy/test/test_hash.py +++ b/imas/test/test_hash.py @@ -4,12 +4,12 @@ import pytest from xxhash import xxh3_64_digest -import imaspy +import imas @pytest.fixture def minimal(ids_minimal_types): - return imaspy.IDSFactory(xml_path=ids_minimal_types).new("minimal") + return imas.IDSFactory(xml_path=ids_minimal_types).new("minimal") def test_hash_str0d(minimal): @@ -17,7 +17,7 @@ def test_hash_str0d(minimal): minimal.str_0d = "Test str_0d hash" expected = xxh3_64_digest(s.encode("utf-8")) assert expected == b"r\x9d\x8dC.JN\x0e" - assert imaspy.util.calc_hash(minimal.str_0d) == expected + assert imas.util.calc_hash(minimal.str_0d) == expected def test_hash_str1d(minimal): @@ -30,7 +30,7 @@ def test_hash_str1d(minimal): hashes = list(map(xxh3_64_digest, string_list)) expected = xxh3_64_digest(struct.pack(" imaspy.DBEntry: - """Open and return an ``imaspy.DBEntry`` pointing to the training data.""" - return _initialize_training_db(imaspy.DBEntry) +def get_training_db_entry() -> imas.DBEntry: + """Open and return an ``imas.DBEntry`` pointing to the training 
data.""" + return _initialize_training_db(imas.DBEntry) def get_training_imas_db_entry(): diff --git a/imaspy/util.py b/imas/util.py similarity index 85% rename from imaspy/util.py rename to imas/util.py index 24af400b..5ab69349 100644 --- a/imaspy/util.py +++ b/imas/util.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Collection of useful helper methods when working with IMASPy. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Collection of useful helper methods when working with imas-python. """ @@ -10,14 +10,14 @@ import numpy -from imaspy.db_entry import DBEntry -from imaspy.ids_base import IDSBase -from imaspy.ids_factory import IDSFactory -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_primitive import IDSInt0D, IDSPrimitive -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.db_entry import DBEntry +from imas.ids_base import IDSBase +from imas.ids_factory import IDSFactory +from imas.ids_metadata import IDSMetadata +from imas.ids_primitive import IDSInt0D, IDSPrimitive +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -32,7 +32,7 @@ def visit_children( ) -> None: """Apply a function to node and its children - IMASPy objects generally live in a tree structure. Similar to Pythons + imas-python objects generally live in a tree structure. Similar to Pythons :py:func:`map`, this method can be used to apply a function to objects within this tree structure. @@ -50,13 +50,13 @@ def visit_children( visit_empty: When set to True, also apply the function to empty nodes. accept_lazy: See documentation of :py:param:`iter_nonempty_() - `. Only + `. 
Only relevant when :param:`visit_empty` is False. Example: .. code-block:: python - # Print all filled leaf nodes in a given IMASPy IDSToplevel + # Print all filled leaf nodes in a given imas-python IDSToplevel visit_children(print, toplevel) See Also: @@ -80,9 +80,9 @@ def tree_iter( accept_lazy: bool = False, include_node: bool = False, ) -> Iterator[IDSBase]: - """Tree iterator for IMASPy structures. + """Tree iterator for imas-python structures. - Iterate (depth-first) through the whole subtree of an IMASPy structure. + Iterate (depth-first) through the whole subtree of an imas-python structure. Args: node: Node to start iterating from. @@ -95,7 +95,7 @@ def tree_iter( visit_empty: When set to True, iterate over empty nodes. accept_lazy: See documentation of :py:param:`iter_nonempty_() - `. Only + `. Only relevant when :param:`visit_empty` is False. include_node: When set to True the iterator will include the provided node (if the node is not a leaf node, it is included only when :param:`leaf_only` is @@ -104,7 +104,7 @@ def tree_iter( Example: .. code-block:: python - # Iterate over all filled leaf nodes in a given IMASPy IDSToplevel + # Iterate over all filled leaf nodes in a given imas-python IDSToplevel for node in tree_iter(toplevel): print(node) @@ -142,7 +142,7 @@ def idsdiff(struct1: IDSStructure, struct2: IDSStructure) -> None: struct1: IDS or structure within an IDS. struct2: IDS or structure within an IDS to compare against :param:`struct1`. """ - import imaspy._util as _util + import imas._util as _util _util.idsdiff_impl(struct1, struct2) @@ -161,7 +161,7 @@ def idsdiffgen( Keyword Args: accept_lazy: See documentation of :py:param:`iter_nonempty_() - `. + `. 
Yields: (description_or_path, node1, node2): tuple describing a difference: @@ -250,7 +250,7 @@ def _idsdiffgen( def resample(node, old_time, new_time, homogeneousTime=None, inplace=False, **kwargs): """Resample all primitives in their time dimension to a new time array""" - import imaspy._util as _util + import imas._util as _util return _util.resample_impl( node, old_time, new_time, homogeneousTime, inplace, **kwargs @@ -268,7 +268,7 @@ def print_tree(structure, hide_empty_nodes=True): structure: IDS structure to print hide_empty_nodes: Show or hide nodes without value. """ - import imaspy._util as _util + import imas._util as _util return _util.print_tree_impl(structure, hide_empty_nodes) @@ -289,16 +289,16 @@ def print_metadata_tree( Examples: .. code-block:: python - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # Print tree of the core_profiles IDS - imaspy.util.print_metadata_tree(core_profiles) + imas.util.print_metadata_tree(core_profiles) # Print descendants of the profiles_1d array of structure only: - imaspy.util.print_metadata_tree(core_profiles.metadata["profiles_1d"]) + imas.util.print_metadata_tree(core_profiles.metadata["profiles_1d"]) # Print descendants of the profiles_1d/electrons structure only: electrons_metadata = core_profiles.metadata["profiles_1d/electrons"] - imaspy.util.print_metadata_tree(electrons_metadata) + imas.util.print_metadata_tree(electrons_metadata) """ - import imaspy._util as _util + import imas._util as _util return _util.print_metadata_tree_impl(structure, maxdepth) @@ -308,7 +308,7 @@ def inspect(ids_node, hide_empty_nodes=False): Inspired by `rich.inspect`, but customized for IDS specifics. """ - import imaspy._util as _util + import imas._util as _util return _util.inspect_impl(ids_node, hide_empty_nodes) @@ -327,9 +327,9 @@ def find_paths(node: IDSBase, query: str) -> List[str]: A list of matching paths. 
Example: - >>> factory = imaspy.IDSFactory() + >>> factory = imas.IDSFactory() >>> core_profiles = factory.new("core_profiles") - >>> imaspy.util.find_paths(core_profiles, "(^|/)time$") + >>> imas.util.find_paths(core_profiles, "(^|/)time$") ['profiles_1d/time', 'profiles_2d/time', 'time'] """ dd_element = node.metadata._structure_xml @@ -401,10 +401,10 @@ def calc_hash(node: IDSBase) -> bytes: Example: .. code-block:: python - cp = imaspy.IDSFactory().core_profiles() + cp = imas.IDSFactory().core_profiles() cp.ids_properties.homogeneous_time = 0 - print(imaspy.util.calc_hash(cp).hex()) # 3b9b929756a242fd + print(imas.util.calc_hash(cp).hex()) # 3b9b929756a242fd """ return node._xxhash() @@ -421,17 +421,17 @@ def get_parent(node: IDSBase) -> Optional[IDSBase]: Example: .. code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_parent(cp.profiles_1d[0].electrons.temperature) + >>> imas.util.get_parent(cp.profiles_1d[0].electrons.temperature) - >>> imaspy.util.get_parent(cp.profiles_1d[0].electrons) + >>> imas.util.get_parent(cp.profiles_1d[0].electrons) - >>> imaspy.util.get_parent(cp.profiles_1d[0]) + >>> imas.util.get_parent(cp.profiles_1d[0]) - >>> imaspy.util.get_parent(cp.profiles_1d) + >>> imas.util.get_parent(cp.profiles_1d) - >>> imaspy.util.get_parent(cp) + >>> imas.util.get_parent(cp) >>> """ if isinstance(node, IDSToplevel): @@ -451,10 +451,10 @@ def get_time_mode(node: IDSBase) -> IDSInt0D: Example: .. code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.ids_properties.homogeneous_time = 0 >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_time_mode(cp.profiles_1d[0].electrons.temperature) + >>> imas.util.get_time_mode(cp.profiles_1d[0].electrons.temperature) int(0) """ @@ -473,9 +473,9 @@ def get_toplevel(node: IDSBase) -> IDSToplevel: Example: .. 
code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_toplevel(cp.profiles_1d[0].electrons.temperature) + >>> imas.util.get_toplevel(cp.profiles_1d[0].electrons.temperature) """ return node._toplevel @@ -502,9 +502,9 @@ def get_full_path(node: IDSBase) -> str: Example: .. code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_full_path(cp.profiles_1d[1].electrons.temperature) + >>> imas.util.get_full_path(cp.profiles_1d[1].electrons.temperature) 'profiles_1d[1]/electrons/temperature' """ return node._path @@ -514,7 +514,7 @@ def get_data_dictionary_version(obj: Union[IDSBase, DBEntry, IDSFactory]) -> str """Find out the version of the data dictionary definitions that this object uses. Args: - obj: Any IMASPy object that is data-dictionary dependent. + obj: Any imas-python object that is data-dictionary dependent. Returns: The data dictionary version, e.g. ``"3.38.1"``. diff --git a/imaspy/__init__.py b/imaspy/__init__.py deleted file mode 100644 index a1947718..00000000 --- a/imaspy/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. - -# isort: skip_file - -from packaging.version import Version as _V - -from . import _version - -__version__ = _version.get_versions()["version"] - -version = __version__ - -# Import logging _first_ -from . import setup_logging - -# Import main user API objects in the imaspy module -from .db_entry import DBEntry -from .ids_factory import IDSFactory -from .ids_convert import convert_ids -from .ids_identifiers import identifiers - -# Load the IMASPy IMAS AL/DD core -from . 
import ( - db_entry, - dd_helpers, - dd_zip, - util, -) - -PUBLISHED_DOCUMENTATION_ROOT = ( - "https://sharepoint.iter.org/departments/POP/CM/IMDesign/" - "Code%20Documentation/IMASPy-doc" -) -"""URL to the published documentation.""" -OLDEST_SUPPORTED_VERSION = _V("3.22.0") -"""Oldest Data Dictionary version that is supported by IMASPy.""" diff --git a/imaspy/__main__.py b/imaspy/__main__.py deleted file mode 100644 index 6e82a92d..00000000 --- a/imaspy/__main__.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Support module to run imaspy as a module: - -.. code-block:: bash - :caption: Options to run imaspy CLI interface - - # Run as a module (implemented in imaspy/__main__.py) - python -m imaspy - - # Run as "program" (see project.scripts in pyproject.toml) - imaspy -""" - -from imaspy.command.cli import cli - -cli() diff --git a/imaspy/_version.py b/imaspy/_version.py deleted file mode 100644 index 581df976..00000000 --- a/imaspy/_version.py +++ /dev/null @@ -1,658 +0,0 @@ - -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. 
- # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "" - cfg.versionfile_source = "imaspy/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - 
else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. 
- # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. 
- - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} diff --git a/imaspy/backends/imas_core/__init__.py b/imaspy/backends/imas_core/__init__.py deleted file mode 100644 index 8d266ac2..00000000 --- a/imaspy/backends/imas_core/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Subpackage implementing data access through the IMAS Access Layer Core. -""" diff --git a/imaspy/backends/netcdf/__init__.py b/imaspy/backends/netcdf/__init__.py deleted file mode 100644 index 93cc2a6f..00000000 --- a/imaspy/backends/netcdf/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""NetCDF IO support for IMASPy. Requires [netcdf] extra dependencies. 
-""" diff --git a/imaspy/dd_helpers.py b/imaspy/dd_helpers.py deleted file mode 100644 index 21a7775f..00000000 --- a/imaspy/dd_helpers.py +++ /dev/null @@ -1,300 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Helper functions to build IDSDef.xml""" - -import logging -import os -import re -import shutil -import subprocess -from io import BytesIO -from pathlib import Path -from typing import Sequence, Tuple, Union -from urllib.request import urlopen -from zipfile import ZIP_DEFLATED, ZipFile - -from packaging.version import Version as V - -logger = logging.getLogger(__name__) - -_idsdef_zip_relpath = Path("imaspy/assets/IDSDef.zip") -_build_dir = Path("build") -_saxon_local_default_name = "saxon9he.jar" # For pre-3.30.0 builds -_saxon_regex = "saxon((.(?!test|xqj))*).jar" # Can be used in re.match - - -def prepare_data_dictionaries(): - """Build IMAS IDSDef.xml files for each tagged version in the DD repository - 1. Search for saxon or download it - 2. Clone the DD repository (ask for user/pass unless ssh key access is available) - 3. Generate IDSDef.xml and rename to IDSDef_${version}.xml - 4. 
Zip all these IDSDefs together and include in wheel - """ - from git import Repo - - saxon_jar_path = get_saxon() - repo: Repo = get_data_dictionary_repo() - if repo: - newest_version_and_tag = (V("0"), None) - for tag in repo.tags: - version_and_tag = (V(str(tag)), tag) - if V(str(tag)) > V("3.21.1"): - newest_version_and_tag = max(newest_version_and_tag, version_and_tag) - logger.debug("Building data dictionary version %s", tag) - build_data_dictionary(repo, tag, saxon_jar_path) - - logger.info("Creating zip file of DD versions") - - if _idsdef_zip_relpath.is_file(): - logger.warning("Overwriting '%s'", _idsdef_zip_relpath) - - with ZipFile( - _idsdef_zip_relpath, - mode="w", # this needs w, since zip can have multiple same entries - compression=ZIP_DEFLATED, - ) as dd_zip: - for filename in _build_dir.glob("[0-9]*.xml"): - arcname = Path("data-dictionary").joinpath(*filename.parts[1:]) - dd_zip.write(filename, arcname=arcname) - # Include identifiers from latest tag in zip file - repo.git.checkout(newest_version_and_tag[1], force=True) - # DD layout <= 4.0.0 - for filename in Path("data-dictionary").glob("*/*identifier.xml"): - arcname = Path("identifiers").joinpath(*filename.parts[1:]) - dd_zip.write(filename, arcname=arcname) - # DD layout > 4.0.0 - for filename in Path("data-dictionary").glob("schemas/*/*identifier.xml"): - arcname = Path("identifiers").joinpath(*filename.parts[2:]) - dd_zip.write(filename, arcname=arcname) - - -# pre 3.30.0 versions of the DD have the `saxon9he.jar` file path hardcoded -# in their makefiles. To be sure we can build everything, we link whatever -# saxon we can find to a local file called saxon9he.jar -def get_saxon() -> Path: - """Search for saxon*.jar and return the path or download it. - The DD build works by having Saxon in the CLASSPATH, called saxon9he.jar - until DD version 3.30.0. After 3.30.0 Saxon is found by the SAXONJARFILE env - variable. 
We will 'cheat' a little bit later by symlinking saxon9he.jar to - any version of saxon we found. - - Check: - 1. CLASSPATH - 2. `which saxon` - 3. /usr/share/java/* - 4. or download it - """ - - local_saxon_path = Path.cwd() / _saxon_local_default_name - if local_saxon_path.exists(): - logger.debug("Something already at '%s' not creating anew", local_saxon_path) - return local_saxon_path - - saxon_jar_origin = Path( - find_saxon_classpath() - or find_saxon_bin() - or find_saxon_jar() - or download_saxon() - ) - logger.info("Found Saxon JAR '%s'", saxon_jar_origin) - if saxon_jar_origin.name != _saxon_local_default_name: - try: - os.symlink(saxon_jar_origin, local_saxon_path) - except FileExistsError: - # Another process could have created the symlink while we were searching - logger.debug( - "Link '%s' exists, parallel process might've created it", - local_saxon_path, - ) - return local_saxon_path - return saxon_jar_origin - - -def find_saxon_jar(): - # This finds multiple versions on my system, but they are symlinked together. - # take the shortest one. - jars = [ - path - for path in Path("/usr/share/java").rglob("*") - if re.match(_saxon_regex, path.name, flags=re.IGNORECASE) - ] - - if jars: - saxon_jar_path = min(jars, key=lambda x: len(x.parts)) - return saxon_jar_path - - -def find_saxon_classpath(): - """Search JAVAs CLASSPATH for a Saxon .jar""" - classpath = os.environ.get("CLASSPATH", "") - for part in re.split(";|:", classpath): - if ( - part.endswith(".jar") - and part.split("/")[-1].startswith("saxon") - and "test" not in part - and "xqj" not in part - ): - return part - - -def find_saxon_bin(): - """Search for a saxon executable""" - saxon_bin = shutil.which("saxon") - if saxon_bin: - with open(saxon_bin, "r") as file: - for line in file: - saxon_jar_path = re.search("[^ ]*saxon[^ ]*jar", line) - if saxon_jar_path: - return saxon_jar_path.group(0) - - -def download_saxon(): - """Downloads a zipfile containing Saxon and extract it to the current dir. 
- Return the full path to Saxon. This can be any Saxon version. Scripts that - wrap this should probably manipulate either the name of this file, and/or - the CLASSPATH""" - - SAXON_PATH = "https://github.com/Saxonica/Saxon-HE/releases/download/SaxonHE10-9/SaxonHE10-9J.zip" # noqa: E501 - - resp = urlopen(SAXON_PATH, timeout=120.0) - zipfile = ZipFile(BytesIO(resp.read())) - # Zipfile has a list of the ZipInfos. Look inside for a Saxon jar - for file in zipfile.filelist: - if re.match(_saxon_regex, file.filename, flags=re.IGNORECASE): - path = zipfile.extract(file) - del zipfile - return path - raise FileNotFoundError(f"No Saxon jar found in given zipfile '{SAXON_PATH}'") - - -def get_data_dictionary_repo() -> Tuple[bool, bool]: - try: - import git # Import git here, the user might not have it! - except ModuleNotFoundError: - raise RuntimeError( - "Could not find 'git' module, try 'pip install gitpython'. \ - Will not build Data Dictionaries!" - ) - - # We need the actual source code (for now) so grab it from ITER - dd_repo_path = "data-dictionary" - - if "DD_DIRECTORY" in os.environ: - logger.info("Found DD_DIRECTORY, copying") - try: - shutil.copytree(os.environ["DD_DIRECTORY"], dd_repo_path) - except FileExistsError: - pass - else: - logger.info("Trying to pull data dictionary git repo from ITER") - - # Set up a bare repo and fetch the access-layer repository in it - os.makedirs(dd_repo_path, exist_ok=True) - try: - repo = git.Repo(dd_repo_path) - except git.exc.InvalidGitRepositoryError: - repo = git.Repo.init(dd_repo_path) - logger.info("Set up local git repository {!s}".format(repo)) - - try: - origin = repo.remote() - except ValueError: - dd_repo_url = "ssh://git@git.iter.org/imas/data-dictionary.git" - origin = repo.create_remote("origin", url=dd_repo_url) - logger.info("Set up remote '{!s}' linking to '{!s}'".format(origin, origin.url)) - - try: - origin.fetch(tags=True) - except git.exc.GitCommandError as ee: - logger.warning( - "Could not fetch tags from 
%s. Git reports:\n %s." "\nTrying to continue", - list(origin.urls), - ee, - ) - else: - logger.info("Remote tags fetched") - return repo - - -def _run_data_dictionary( - args: Union[Sequence, str], tag: str, saxon_jar_path: str -) -> int: - """Run in a Data Dictionary environment. Used e.g. to run the DD Makefile - - Args: - args: The "args" argument directly passed to :func:`subprocess.run`, - e.g. ``["make", "clean"]`` - tag: The DD version tag that will be printed on error - saxon_jar_path: The path to the saxon jar; Added to CLASSPATH and used - to generate the DD - """ - env = os.environ.copy() - env["CLASSPATH"] = f"{saxon_jar_path}:{env.get('CLASSPATH', '')}" - result = subprocess.run( - args, - bufsize=0, - capture_output=True, - cwd=os.getcwd() + "/data-dictionary", - env=env, - text=True, - ) - - if result.returncode != 0: - logger.warning("Error making DD version %s, make reported:", tag) - logger.warning("CLASSPATH ='%s'", saxon_jar_path) - logger.warning("PATH = '%s'", os.environ.get("PATH", "")) - logger.warning("stdout = '%s'", result.stdout.strip()) - logger.warning("stderr = '%s'", result.stderr.strip()) - logger.warning("continuing without DD version %s", tag) - else: - logger.debug( - "Successful make for DD %s.\n-- Make stdout --\n%s\n-- Make stderr --\n%s", - tag, - result.stdout, - result.stderr, - ) - return result.returncode - - -def build_data_dictionary(repo, tag: str, saxon_jar_path: str, rebuild=False) -> None: - """Build a single version of the data dictionary given by the tag argument - if the IDS does not already exist. - - In the data-dictionary repository sometimes IDSDef.xml is stored - directly, in which case we do not call make. 
- - Args: - repo: Repository object containing the DD source code - tag: The DD version tag that will be build - saxon_jar_path: The path to the saxon jar; Added to CLASSPATH and used - to generate the DD - rebuild: If true, overwrites existing pre-build tagged DD version - """ - _build_dir.mkdir(exist_ok=True) - result_xml = _build_dir / f"{tag}.xml" - - if result_xml.exists() and not rebuild: - logger.debug(f"XML for tag '{tag}' already exists, skipping") - return - - repo.git.checkout(tag, force=True) - if _run_data_dictionary(["make", "clean"], tag, saxon_jar_path) != 0: - return - if _run_data_dictionary(["make", "IDSDef.xml"], tag, saxon_jar_path) != 0: - return - - # copy and delete original instead of move (to follow symlink) - IDSDef = Path("data-dictionary/IDSDef.xml") - try: - shutil.copy( - IDSDef, # Hardcoded in access-layer makefile - result_xml, - follow_symlinks=True, - ) - except shutil.SameFileError: - pass - IDSDef.unlink(missing_ok=True) - - -if __name__ == "__main__": - prepare_data_dictionaries() diff --git a/imaspy/test/test_dd_helpers.py b/imaspy/test/test_dd_helpers.py deleted file mode 100644 index 324e789d..00000000 --- a/imaspy/test/test_dd_helpers.py +++ /dev/null @@ -1,134 +0,0 @@ -import pytest - -from imaspy.dd_helpers import find_saxon_classpath - -# TODO: Write tests! 
-# def prepare_data_dictionaries(): -# def get_saxon(): -# def find_saxon_jar(): - -# Quadruplets of (cluster, module, real path, name) -saxon_binary_quadruplets = ( - ( - "SDCC", - "Saxon-HE/10.3-Java-1.8", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-1.8/saxon-he-10.3.jar", - "saxon-he-10.3.jar", - ), - ( - "SDCC", - "Saxon-HE/10.3-Java-11", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar", - "saxon-he-10.3.jar", - ), - ( - "HPC", - "Saxon-HE/9.7.0.14-Java-1.6.0_45", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.7.0.14-Java-1.6.0_45/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/9.7.0.4-Java-1.7.0_79", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.7.0.4-Java-1.7.0_79/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/9.7.0.21-Java-1.8.0_162", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.7.0.21-Java-1.8.0_162/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/9.9.1.7-Java-13", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.9.1.7-Java-13/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/10.3-Java-11", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar", - "saxon-he-10.3.jar", - ), -) - -saxon_nonmatches = ( - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-test-10.3.jar", -) - - -# find_saxon_bin tries to find saxon in the CLASSPATH env variable -# It is thus per definition environment dependent -def test_empty_classpath(monkeypatch): - monkeypatch.setenv("CLASSPATH", "") - saxon_jar_path = find_saxon_classpath() - assert saxon_jar_path is None - - -@pytest.mark.parametrize("cluster,module,path,name", saxon_binary_quadruplets) -def test_classpath(monkeypatch, cluster, module, path, name): - monkeypatch.setenv("CLASSPATH", path) - saxon_jar_path = find_saxon_classpath() - assert saxon_jar_path == path - - -@pytest.mark.parametrize("path", saxon_nonmatches) -def test_classpath_do_not_match(monkeypatch, path): - 
monkeypatch.setenv("CLASSPATH", path) - saxon_jar_path = find_saxon_classpath() - assert saxon_jar_path is None - - -# ITER SDCC login01 20210617 -# module load GCCcore/10.2.0 -# module load Python/3.8.6-GCCcore-10.2.0 -# module load MDSplus/7.96.17-GCCcore-10.2.0 -# module load HDF5/1.10.7-iimpi-2020b # todo: Intel MPI version? -# module load Boost/1.74.0-GCCcore-10.2.0 -# module load MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11 -# module load Saxon-HE/10.3-Java-11 -def test_classpath_sdcc(monkeypatch): - monkeypatch.setenv( - "CLASSPATH", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-xqj-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-test-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/jline-2.9.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar:" - "/work/imas/opt/EasyBuild/software/MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11/java/classes/*", - ) - saxon_jar_path = find_saxon_classpath() - assert ( - saxon_jar_path - == "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar" - ) - - -# ITER HPC login01 20210617 -# module load GCCcore/10.2.0 -# module load Python/3.8.6-GCCcore-10.2.0 -# module load MDSplus/7.96.17-GCCcore-10.2.0 -# module load HDF5/1.10.7-iimpi-2020b # todo: Intel MPI version? 
-# module load Boost/1.74.0-GCCcore-10.2.0 -# module load MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11 -# module load Saxon-HE/10.3-Java-11 -def test_classpath_hpc(monkeypatch): - monkeypatch.setenv( - "CLASSPATH", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-xqj-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-test-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/jline-2.9.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar:" - "/work/imas/opt/EasyBuild/software/MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11/java/classes/*", - ) - saxon_jar_path = find_saxon_classpath() - assert ( - saxon_jar_path - == "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar" - ) - - -# TODO: Write tests! -# def find_saxon_bin(): -# def get_data_dictionary_repo(): -# def build_data_dictionary(): diff --git a/pyproject.toml b/pyproject.toml index dccd6912..87f92363 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,23 +1,25 @@ [build-system] # Minimum requirements for the build system to execute. 
# Keep this on a single line for the grep magic of build scripts to work -requires = ["setuptools>=61", "wheel", "numpy", "gitpython", "packaging", "tomli;python_version<'3.11'", "versioneer[toml]"] -# needs saxon9he.jar in CLASSPATH +requires = ["setuptools>=61", "wheel", "numpy", "gitpython", "saxonche","packaging", "tomli;python_version<'3.11'", "setuptools_scm>8"] build-backend = "setuptools.build_meta" [project] -name = "imaspy" +name = "imas-python" authors = [ - {name = "IMASPy Developers"}, - {name = "Olivier Hoenen", email = "olivier.hoenen@iter.org"}, + {name = "Karel Lucas van de Plassche", email = "karelvandeplassche@gmail.com"}, + {name = "Daan van Vugt", email = "dvanvugt@ignitioncomputing.com"}, + {name = "Maarten Sebregts", email = "msebregts@ignitioncomputing.com"}, + {name = "ITER Organization"}, + {email = "imas-support@iter.org"}, ] description = "Pythonic wrappers for the IMAS Access Layer" readme = {file = "README.md", content-type = "text/markdown"} requires-python = ">=3.7" license = {file = "LICENSE.txt"} classifiers = [ - "Development Status :: 3 - Alpha", + "Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: Plugins", "Environment :: X11 Applications", @@ -32,9 +34,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Science/Research", "Intended Audience :: System Administrators", -# The license is prepended by private which makes it a valid trove classifier, -# it will also prevent uploading to package indices such as PyPI - "Private :: License :: ITER License", + "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", "Natural Language :: English", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", @@ -64,12 +64,14 @@ dependencies = [ "importlib_resources;python_version<'3.9'", "packaging", "xxhash >= 2", + "saxonche", + "gitpython" ] [project.optional-dependencies] # these self-dependencies are available since pip 21.2 all = [ - 
"imaspy[test,docs,imas-core,netcdf,h5py]" + "imas[test,docs,imas-core,netcdf,h5py]" ] docs = [ "sphinx>=6.0.0,<7.0.0", @@ -77,7 +79,10 @@ docs = [ "sphinx-immaterial>=0.11.0,<0.12", "sphinx-click", ] -imas-core = [ "imas-core@git+ssh://git@git.iter.org/imas/al-core.git@main" ] + +# TODO enable when imas-core is available on pypi +# imas-core = [ "imas-core@git+https://github.com/iterorganization/imas-core.git@main" ] + netcdf = [ "netCDF4>=1.7.0", ] @@ -99,15 +104,15 @@ test = [ ] [project.scripts] -build_DD = "imaspy.dd_helpers:prepare_data_dictionaries" -imaspy = "imaspy.command.cli:cli" +build_DD = "imas.dd_helpers:prepare_data_dictionaries" +imas = "imas.command.cli:cli" [project.urls] -homepage = "https://git.iter.org/projects/IMAS/repos/imaspy" +homepage = "https://github.com/iterorganization/imas-python" [tool.setuptools.packages.find] where = ["."] -include = ["imaspy*"] +include = ["imas*"] [tool.pytest.ini_options] minversion = "6.0" @@ -116,19 +121,11 @@ markers = [ "cli: Tests for the command line interface.", "slow: Slow tests.", ] -testpaths = "imaspy" - -[tool.versioneer] -VCS = "git" -style = "pep440" -versionfile_source = "imaspy/_version.py" -versionfile_build = "imaspy/_version.py" -tag_prefix = "" -parentdir_prefix = "" +testpaths = "imas" -[tool.black] -# Exclude versioneer-generated file -extend-exclude = "imaspy/_version.py" +[tool.setuptools_scm] +write_to = "imas/_version.py" +local_scheme = "no-local-version" [tool.isort] profile = "black" diff --git a/setup.cfg b/setup.cfg index f8fff4f2..8e5dd292 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,7 +13,7 @@ max-line-length = 88 per-file-ignores= # Ignore import errors in __init__.py (import not at top of file; imported but # unused) - imaspy/__init__.py:E402,F401 + imas/__init__.py:E402,F401 # Lots of CLASSPATHS in this test file: adhering to line length would be less # readable - imaspy/test/test_dd_helpers.py:E501 + imas/test/test_dd_helpers.py:E501 diff --git a/setup.py b/setup.py index 
a859c164..692fb5cf 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,11 @@ # pylint: disable=wrong-import-position -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """ Packaging settings. Inspired by a minimal setup.py file, the Pandas cython build and the access-layer setup template. -The installable IMASPy package tries to follow in the following order: +The installable imas-python package tries to follow in the following order: - The style guide for Python code [PEP8](https://www.python.org/dev/peps/pep-0008/) - The [PyPA guide on packaging projects]( https://packaging.python.org/guides/distributing-packages-using-setuptools/#distributing-packages) @@ -25,11 +25,9 @@ import importlib.util import site import traceback - # Allow importing local files, see https://snarky.ca/what-the-heck-is-pyproject-toml/ import sys import warnings - # Import other stdlib packages from pathlib import Path @@ -49,7 +47,6 @@ # Ensure the current folder is on the import path: sys.path.append(str(Path(__file__).parent.resolve())) -import versioneer # noqa cannonical_python_command = "module load Python/3.8.6-GCCcore-10.2.0" @@ -80,13 +77,13 @@ this_dir = this_file.parent.resolve() # Start: Load dd_helpers -dd_helpers_file = this_dir / "imaspy/dd_helpers.py" +dd_helpers_file = this_dir / "imas/dd_helpers.py" assert dd_helpers_file.is_file() spec = importlib.util.spec_from_file_location("dd_helpers", dd_helpers_file) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) -sys.modules["imaspy.dd_helpers"] = module -from imaspy.dd_helpers import prepare_data_dictionaries # noqa +sys.modules["imas.dd_helpers"] = module +from imas.dd_helpers import prepare_data_dictionaries # noqa # End: Load dd_helpers @@ -114,9 +111,8 @@ def run(self): # - `pip install -e .`` (from git clone) # - `python -m build`` # - 
Source tarball from git-archive. Note: version only picked up when doing git-archive -# from a tagged release, otherwise version will be "0+unknown" (expected versioneer -# behaviour). -# `git archive HEAD -v -o imaspy.tar.gz && pip install imaspy.tar.gz` +# from a tagged release, +# `git archive HEAD -v -o imas.tar.gz && pip install imas.tar.gz` cmd_class = {} build_overrides = {"build_ext": build_ext, "build_py": build_py, "sdist": sdist} if bdist_wheel: @@ -139,7 +135,6 @@ def run(self): if __name__ == "__main__": setup( - version=versioneer.get_version(), zip_safe=False, # https://mypy.readthedocs.io/en/latest/installed_packages.html - cmdclass=versioneer.get_cmdclass({"build_DD": BuildDDCommand, **cmd_class}), - ) + cmdclass={"build_DD": BuildDDCommand, **cmd_class} + ) \ No newline at end of file diff --git a/tools/compare_lowlevel_access_patterns.py b/tools/compare_lowlevel_access_patterns.py index 03b3e6d2..88a5f2aa 100644 --- a/tools/compare_lowlevel_access_patterns.py +++ b/tools/compare_lowlevel_access_patterns.py @@ -1,4 +1,4 @@ -"""Compare the access patterns of the lowlevel AL API between IMASPy and the HLI. +"""Compare the access patterns of the lowlevel AL API between imas-python and the HLI. 
""" from functools import wraps @@ -9,9 +9,9 @@ import click import imas -import imaspy -from imaspy.test.test_helpers import fill_with_random_data -from imaspy.ids_defs import IDS_TIME_MODE_HETEROGENEOUS +import imas +from imas.test.test_helpers import fill_with_random_data +from imas.ids_defs import IDS_TIME_MODE_HETEROGENEOUS class ALWrapper: @@ -46,10 +46,10 @@ def wrapper(*args, **kwargs): setattr(sys.modules[item], alias, wrapper) -def compare_ids_put(imaspy_ids, hli_ids): +def compare_ids_put(imas_ids, hli_ids): imas._al_lowlevel._log.clear() # Start with hli IDS - dbentry = imas.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") dbentry.create() try: dbentry.put(hli_ids) @@ -59,49 +59,49 @@ def compare_ids_put(imaspy_ids, hli_ids): dbentry.close() hli_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] - # And then the imaspy IDS - dbentry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + # And then the imas IDS + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") dbentry.create() try: - dbentry.put(imaspy_ids) + dbentry.put(imas_ids) except Exception as exc: - print("Caught error while putting imaspy ids:", exc) + print("Caught error while putting imas ids:", exc) traceback.print_exc() dbentry.close() - imaspy_log = imas._al_lowlevel._log + imas_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] hli_log_text = "\n".join("\t".join(item) for item in hli_log) - imaspy_log_text = "\n".join("\t".join(item) for item in imaspy_log) + imas_log_text = "\n".join("\t".join(item) for item in imas_log) Path("/tmp/hli.log").write_text(hli_log_text) - Path("/tmp/imaspy.log").write_text(imaspy_log_text) - print("Logs stored in /tmp/hli.log and /tmp/imaspy.log") + Path("/tmp/imas.log").write_text(imas_log_text) + print("Logs stored in /tmp/hli.log and /tmp/imas.log") -def compare_ids_get(imaspy_ids): +def 
compare_ids_get(imas_ids): # First put the ids - idbentry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + idbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") idbentry.create() - idbentry.put(imaspy_ids) + idbentry.put(imas_ids) - dbentry = imas.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") dbentry.open() # Start with hli IDS imas._al_lowlevel._log.clear() - dbentry.get(imaspy_ids.metadata.name) + dbentry.get(imas_ids.metadata.name) hli_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] - # And then the imaspy IDS - idbentry.get(imaspy_ids.metadata.name) - imaspy_log = imas._al_lowlevel._log + # And then the imas IDS + idbentry.get(imas_ids.metadata.name) + imas_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] # Cleanup dbentry.close() idbentry.close() hli_log_text = "\n".join("\t".join(item) for item in hli_log) - imaspy_log_text = "\n".join("\t".join(item) for item in imaspy_log) + imas_log_text = "\n".join("\t".join(item) for item in imas_log) Path("/tmp/hli.log").write_text(hli_log_text) - Path("/tmp/imaspy.log").write_text(imaspy_log_text) - print("Logs stored in /tmp/hli.log and /tmp/imaspy.log") + Path("/tmp/imas.log").write_text(imas_log_text) + print("Logs stored in /tmp/hli.log and /tmp/imas.log") @click.command() @@ -113,33 +113,33 @@ def compare_ids_get(imaspy_ids): help="Use heterogeneous time mode instead of homogeneous time.", ) def main(ids_name, method, heterogeneous): - """Compare lowlevel calls done by IMASPy vs. the Python HLI + """Compare lowlevel calls done by imas-python vs. the Python HLI This program fills the provided IDS with random data, then does I/O with it using - both the Python HLI and the IMASPy APIs. The resulting calls to the lowlevel Access - Layer are logged to respectively /tmp/hli.log and /tmp/imaspy.log. + both the Python HLI and the imas-python APIs. 
The resulting calls to the lowlevel Access + Layer are logged to respectively /tmp/hli.log and /tmp/imas.log. You may use your favorite diff tool to compare the two files. \b IDS_NAME: The name of the IDS to use for testing, for example "core_profiles". """ - imaspy_ids = imaspy.IDSFactory().new(ids_name) + imas_ids = imas.IDSFactory().new(ids_name) hli_ids = getattr(imas, ids_name)() - fill_with_random_data(imaspy_ids) - hli_ids.deserialize(imaspy_ids.serialize()) + fill_with_random_data(imas_ids) + hli_ids.deserialize(imas_ids.serialize()) if heterogeneous: # Change time mode time_mode = IDS_TIME_MODE_HETEROGENEOUS - imaspy_ids.ids_properties.homogeneous_time = time_mode + imas_ids.ids_properties.homogeneous_time = time_mode hli_ids.ids_properties.homogeneous_time = time_mode if method == "put": - compare_ids_put(imaspy_ids, hli_ids) + compare_ids_put(imas_ids, hli_ids) elif method == "get": - compare_ids_get(imaspy_ids) + compare_ids_get(imas_ids) if __name__ == "__main__": diff --git a/tools/extract_test_data.py b/tools/extract_test_data.py index 60e61116..d64c4f51 100644 --- a/tools/extract_test_data.py +++ b/tools/extract_test_data.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. import os import imas diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 18e34c2f..00000000 --- a/versioneer.py +++ /dev/null @@ -1,2205 +0,0 @@ - -# Version: 0.28 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! 
-* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain (Unlicense) -* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in setuptools-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -Versioneer provides two installation modes. The "classic" vendored mode installs -a copy of versioneer into your repository. The experimental build-time dependency mode -is intended to allow you to skip this step and simplify the process of upgrading. - -### Vendored mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) - * Note that you will need to add `tomli; python_version < "3.11"` to your - build-time dependencies if you use `pyproject.toml` -* run `versioneer install --vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -### Build-time dependency mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) -* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) - to the 
`requires` key of the `build-system` table in `pyproject.toml`: - ```toml - [build-system] - requires = ["setuptools", "versioneer[toml]"] - build-backend = "setuptools.build_meta" - ``` -* run `versioneer install --no-vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). 
- -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. 
- -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. 
However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. 
- -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg` and `pyproject.toml`, if necessary, - to include any new configuration settings indicated by the release notes. - See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install --[no-]vendor` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the "Unlicense", as described in -https://unlicense.org/. 
- -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" -# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring -# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements -# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error -# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with -# pylint:disable=attribute-defined-outside-init,too-many-arguments - -import configparser -import errno -import json -import os -import re -import subprocess -import sys -from pathlib import Path -from typing import Callable, Dict -import functools - -have_tomllib = True -if sys.version_info >= (3, 11): - import tomllib -else: - try: - import tomli as tomllib - except ImportError: - have_tomllib = False - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. 
" - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise OSError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . 
- root = Path(root) - pyproject_toml = root / "pyproject.toml" - setup_cfg = root / "setup.cfg" - section = None - if pyproject_toml.exists() and have_tomllib: - try: - with open(pyproject_toml, 'rb') as fobj: - pp = tomllib.load(fobj) - section = pp['tool']['versioneer'] - except (tomllib.TOMLDecodeError, KeyError): - pass - if not section: - parser = configparser.ConfigParser() - with open(setup_cfg) as cfg_file: - parser.read_file(cfg_file) - parser.get("versioneer", "VCS") # raise error if missing - - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = section['VCS'] - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""', None): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember 
shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = 
sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. 
- # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. 
- - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. 
- # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [versionfile_source] - if ipy: - files.append(ipy) - if "VERSIONEER_PEP518" not in globals(): - try: - my_path = __file__ - if my_path.endswith((".pyc", ".pyo")): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.28) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. 
- -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. 
- """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - 
print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provide as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to setuptools - from setuptools import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # pip install -e . and setuptool/editable_wheel will invoke build_py - # but the build_py command is not expected to copy any files. 
- - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - else: - from setuptools.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - if getattr(self, "editable_mode", False): - # During editable installs `.py` and data files are - # not copied to build_lib - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - else: - from setuptools.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if not cfg.versionfile_build: - return - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - if not os.path.exists(target_versionfile): - print(f"Warning: {target_versionfile} does not exist, skipping " - "version update. This can happen if you are running build_ext " - "without first running build_py.") - return - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? 
- from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? 
- try: - from py2exe.setuptools_buildexe import py2exe as _py2exe - except ImportError: - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # sdist farms its file list building out to egg_info - if 'egg_info' in cmds: - _egg_info = cmds['egg_info'] - else: - from setuptools.command.egg_info import egg_info as _egg_info - - class cmd_egg_info(_egg_info): - def find_sources(self): - # egg_info.find_sources builds the manifest list and writes it - # in one shot - super().find_sources() - - # Modify the filelist and normalize it - root = get_root() - cfg = get_config_from_root(root) - self.filelist.append('versioneer.py') - if cfg.versionfile_source: - # There are rare cases where versionfile_source might not be - # included by default, so we must be explicit - self.filelist.append(cfg.versionfile_source) - self.filelist.sort() - self.filelist.remove_duplicates() - - # The write method is hidden in the manifest_maker instance that - # generated the filelist and was thrown away - # We will instead replicate their final normalization (to unicode, - # and POSIX-style paths) - from setuptools import unicode_utils - normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') - for f in self.filelist.files] - - manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') - with open(manifest_filename, 'w') as fobj: - 
fobj.write('\n'.join(normalized)) - - cmds['egg_info'] = cmd_egg_info - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - else: - from setuptools.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -def setup_command(): - """Set up Versioneer and exit with appropriate error code.""" - errors = do_setup() - errors += scan_setup_py() - sys.exit(1 if errors else 0) - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - setup_command()