From e68bea140c2bc5673da69eb4f8a0157b0c213703 Mon Sep 17 00:00:00 2001 From: Yannick Augenstein Date: Tue, 23 Sep 2025 12:36:28 +0200 Subject: [PATCH] feat(config): introduce profile-based configuration system --- CHANGELOG.md | 1 + docs/api/configuration.rst | 43 ++ docs/api/constants.rst | 4 +- docs/api/index.rst | 1 + docs/configuration/index.rst | 175 +++++++ docs/configuration/migration.rst | 48 ++ docs/extras/index.rst | 3 + docs/index.rst | 25 +- docs/install.rst | 20 +- poetry.lock | 377 +++++++++----- pyproject.toml | 1 + tests/config/conftest.py | 53 ++ tests/config/test_legacy.py | 20 + tests/config/test_loader.py | 138 +++++ tests/config/test_manager.py | 99 ++++ tests/config/test_plugins.py | 46 ++ tests/config/test_profiles.py | 25 + .../test_components/autograd/test_autograd.py | 13 +- tidy3d/components/autograd/constants.py | 31 -- .../components/autograd/derivative_utils.py | 53 +- tidy3d/components/geometry/base.py | 11 +- tidy3d/components/geometry/polyslab.py | 150 +++--- tidy3d/components/geometry/primitives.py | 14 +- tidy3d/components/structure.py | 11 +- tidy3d/config.py | 60 --- tidy3d/config/__init__.py | 62 +++ tidy3d/config/legacy.py | 324 ++++++++++++ tidy3d/config/loader.py | 317 ++++++++++++ tidy3d/config/manager.py | 477 ++++++++++++++++++ tidy3d/config/profiles.py | 59 +++ tidy3d/config/registry.py | 74 +++ tidy3d/config/sections.py | 352 +++++++++++++ tidy3d/config/serializer.py | 145 ++++++ tidy3d/packaging.py | 20 +- .../smatrix/component_modelers/base.py | 7 +- tidy3d/web/api/autograd/autograd.py | 59 ++- tidy3d/web/api/autograd/backward.py | 8 +- tidy3d/web/api/autograd/ops_backward.py | 8 +- tidy3d/web/cli/app.py | 49 +- tidy3d/web/cli/constants.py | 15 +- tidy3d/web/cli/migrate.py | 12 +- tidy3d/web/core/environment.py | 240 +-------- tidy3d/web/core/http_util.py | 28 +- 43 files changed, 2988 insertions(+), 690 deletions(-) create mode 100644 docs/api/configuration.rst create mode 100644 docs/configuration/index.rst create 
mode 100644 docs/configuration/migration.rst create mode 100644 tests/config/conftest.py create mode 100644 tests/config/test_legacy.py create mode 100644 tests/config/test_loader.py create mode 100644 tests/config/test_manager.py create mode 100644 tests/config/test_plugins.py create mode 100644 tests/config/test_profiles.py delete mode 100644 tidy3d/components/autograd/constants.py delete mode 100644 tidy3d/config.py create mode 100644 tidy3d/config/__init__.py create mode 100644 tidy3d/config/legacy.py create mode 100644 tidy3d/config/loader.py create mode 100644 tidy3d/config/manager.py create mode 100644 tidy3d/config/profiles.py create mode 100644 tidy3d/config/registry.py create mode 100644 tidy3d/config/sections.py create mode 100644 tidy3d/config/serializer.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 7edd63fcc2..4f1f1da887 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - A new type of doping box has been introduced, `CustomDoping` which accepts a `SpatialDataArray` to define doping concentration. Unlike in the case where a `SpatialDataArray`, custom doping defined with `CustomDoping` have additive behavior, i.e., one can add other doping on top. This deprecates the `SpatialDataArray` as direct input for `N_a` and `N_d`. - Non-isothermal Charge simulations are now available. One can now run this type of simulations by using the `SteadyChargeDCAnalysis` as the `analysis_spec` of a `HeatChargeSimulation`. This type of simulations couple the heat equation with the drift-diffusion equations which allow to account for self heating behavior. - Because non-isothermal Charge simulations are now supported, new models for the effective density of states and bandgap energy have been introduced. These models are the following: `ConstantEffectiveDOS`, `IsotropicEffectiveDOS`, `MultiValleyEffectiveDOS`, `DualValleyEffectiveDOS`. 
+- Introduced a profile-based configuration manager with TOML persistence and runtime overrides exposed via `tidy3d.config`. ### Changed - `LayerRefinementSpec` defaults to assuming structures made of different materials are interior-disjoint for more efficient mesh generation. diff --git a/docs/api/configuration.rst b/docs/api/configuration.rst new file mode 100644 index 0000000000..6410e87d6a --- /dev/null +++ b/docs/api/configuration.rst @@ -0,0 +1,43 @@ +Configuration API +================= + +.. currentmodule:: tidy3d.config + +The objects and helpers below expose the public configuration interface. + +Manager and Helpers +------------------- + +.. autosummary:: + :toctree: _autosummary/ + :template: module.rst + + ConfigManager + get_manager + reload_config + config + +Legacy Compatibility +-------------------- + +.. autosummary:: + :toctree: _autosummary/ + :template: module.rst + + LegacyConfigWrapper + Env + Environment + EnvironmentConfig + +Registration Utilities +---------------------- + +.. 
autosummary:: + :toctree: _autosummary/ + :template: module.rst + + register_section + register_plugin + register_handler + get_sections + get_handlers diff --git a/docs/api/constants.rst b/docs/api/constants.rst index 6a2c0ebca5..6123f0d784 100644 --- a/docs/api/constants.rst +++ b/docs/api/constants.rst @@ -35,7 +35,7 @@ Tidy3D Configuration :toctree: _autosummary/ :template: module.rst - tidy3d.config.Tidy3dConfig + tidy3d.config.config Default Absorber Parameters ---------------------------- @@ -92,4 +92,4 @@ Precision & Comparator Values tidy3d.constants.fp_eps tidy3d.constants.pec_val tidy3d.constants.LARGE_NUMBER - tidy3d.constants.GLANCING_CUTOFF \ No newline at end of file + tidy3d.constants.GLANCING_CUTOFF diff --git a/docs/api/index.rst b/docs/api/index.rst index bb46ae705c..9a77908bd4 100644 --- a/docs/api/index.rst +++ b/docs/api/index.rst @@ -18,6 +18,7 @@ API |:computer:| output_data analytic_beams utilities + configuration mesh/index heat/index charge/index diff --git a/docs/configuration/index.rst b/docs/configuration/index.rst new file mode 100644 index 0000000000..5b2745aa0e --- /dev/null +++ b/docs/configuration/index.rst @@ -0,0 +1,175 @@ +Configuration Guide |:gear:| +============================ + +.. highlight:: python + +Working with cloud simulations usually requires a handful of settings such as +your API key, the active environment, and any local tweaks you make while +experimenting. The ``tidy3d.config`` module keeps all of this in one place +through a single object called ``config``. This page explains how it behaves, +where values are stored, and how to keep your changes consistent across +sessions. 
+ +Getting Started +--------------- + +Most users only need the following import:: + + from tidy3d.config import config + +You can then read or update sections just like attributes:: + + # read values + print(config.web.api_endpoint) + print(config.logging.level) + + # update values + config.logging.level = "DEBUG" + config.web.timeout = 60 + config.save() + +The ``save()`` call writes your edits to disk so the same settings load the +next time you import ``tidy3d``. + +Where Settings Are Stored +------------------------- + +Tidy3D chooses a configuration directory the first time you import the module. +The location depends on your operating system: + +.. list-table:: Default configuration directory + :widths: 30 70 + :header-rows: 1 + + * - Platform + - Path + * - macOS / Linux + - ``~/.config/tidy3d`` + * - Windows + - ``C:\\Users\\\\.config\\tidy3d`` + +You can override this by setting the ``TIDY3D_BASE_DIR`` environment variable +*before* importing ``tidy3d``. When it is present, the config files are kept in +``/.tidy3d``. If the chosen location is not writable, Tidy3D falls back to +a temporary directory and warns that the settings will not persist. + +Files Inside the Directory +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- ``config.toml`` – base settings shared by all profiles. +- ``profiles/.toml`` – optional overrides for custom profiles. Each file + only contains the differences from the base settings. + +Priority Order +-------------- + +Whenever you read ``config.
.``, the value comes from the highest +priority source in the list below. Lower entries only apply when the ones above +them do not set a value. + +1. Runtime changes you make in the current Python session. +2. Environment variables (``TIDY3D_
__``). +3. Profile overrides from ``profiles/.toml``. +4. The base ``config.toml`` file. +5. Built-in profiles (for example ``prod`` and ``dev``) bundled with Tidy3D. +6. Default values defined by the software. + +This means environment variables always win over ``config.toml``, and any +attribute you set in code wins over everything else until you discard it or +call ``save()``. + +Making Changes That Last +------------------------ + +Runtime Updates +~~~~~~~~~~~~~~~ + +Assignments like ``config.logging.level = "INFO"`` apply immediately but only +live in memory. They affect new simulations started in the same interpreter but +disappear when the process exits. + +Saving to Disk +~~~~~~~~~~~~~~ + +Call ``config.save()`` to write the current profile to disk. The method removes +environment-variable overrides automatically so you never persist an API key or +other secret that was loaded from the shell. To store the full set of values, +including defaults, pass ``include_defaults=True``:: + + config.save(include_defaults=True) + +Profiles +-------- + +Tidy3D ships with built-in profiles such as ``prod`` and ``dev``. Switch between +them with:: + + config.switch_profile("dev") + +To create your own profile, switch to a new name, edit settings, then call +``save()``:: + + config.switch_profile("customer") + config.web.api_endpoint = "https://example.com" + config.save() + +This writes ``profiles/customer.toml`` containing only the adjustments you +made. Use ``config.profiles.list()`` to discover available built-in and user +profiles. + +Environment Variables +--------------------- + +Environment variables let you override individual options without touching any +files. The naming pattern is ``TIDY3D_
__``, for example:: + + export TIDY3D_LOGGING__LEVEL=WARNING + +Supported variables take effect the next time you import ``tidy3d``. Remove a +variable or clear the shell environment to restore the lower priority setting. + +Plugin Settings +--------------- + +Plugins can register their own sections so their options appear under +``config.plugins``. A typical plugin exposes a model decorated with +``@register_plugin``:: + + from tidy3d.config.sections import ConfigSection + from tidy3d.config import register_plugin + + @register_plugin("sample") + class SamplePluginConfig(ConfigSection): + enabled: bool = False + threshold: float = 0.5 + +After the plugin is imported, you can read or modify its settings with:: + + config.plugins.sample.enabled = True + config.save() + +If the plugin registers later in the session, previously saved values are +loaded automatically. + +Command Line Helpers +-------------------- + +Use ``tidy3d configure`` to store your API key in ``config.toml``. The command +creates the directory if it is missing and updates only the ``web`` section. + +If you have older files in ``~/.tidy3d``, run ``tidy3d config migrate`` to move +them into the new location described above. + +Legacy Access Points +-------------------- + +Older code paths such as ``tidy3d.config.logging_level`` and ``tidy3d.config.Env`` +still work. They emit a ``DeprecationWarning`` each time you use them to help +you transition to the modern interface. See :doc:`migration` for advice on +updating scripts that depend on these names. 
+ +Next Steps +---------- + +- :doc:`migration` +- :doc:`../api/configuration` diff --git a/docs/configuration/migration.rst b/docs/configuration/migration.rst new file mode 100644 index 0000000000..aa9c805f7e --- /dev/null +++ b/docs/configuration/migration.rst @@ -0,0 +1,48 @@ +Upgrading Existing Setups +========================= + +This short note highlights the differences you may notice when moving from +earlier versions of Tidy3D to the current configuration manager. + +File Locations +-------------- + +- Previous releases stored settings in ``~/.tidy3d`` on all platforms. The new + manager now prefers the platform-specific paths described in + :doc:`index`. +- Your existing ``~/.tidy3d/config.toml`` is still respected. Run + ``tidy3d config migrate`` if you would like to copy it into the new directory. + +Environment Switching +--------------------- + +- The ``Env`` helper remains available. Calls such as ``Env.dev.active()`` now + forward to the new manager and produce a ``DeprecationWarning`` to encourage + the modern API, e.g. ``config.switch_profile("dev")``. + +Legacy Attributes +----------------- + +- Shorthand properties ``config.logging_level``, ``config.log_suppression``, + and ``config.use_local_subpixel`` still work and set the equivalent fields in + ``config.logging`` or ``config.simulation``. Each call raises a warning so + you can update scripts at your own pace. + +Working Safely With Existing Scripts +------------------------------------ + +- Prefer the new attribute paths (``config.logging.level``) in fresh code. +- When editing older notebooks, import ``warnings`` and silence specific + deprecations temporarily if needed:: + + import warnings + warnings.filterwarnings("ignore", category=DeprecationWarning) + +- After updating your scripts, remove the filter so you notice future changes. + +Need Help? +---------- + +Reach out through the `Tidy3D Discussions board `_ +or contact support@flexcompute.com if you hit any issues while upgrading. 
+ diff --git a/docs/extras/index.rst b/docs/extras/index.rst index 2367c79e36..3e874f7a76 100644 --- a/docs/extras/index.rst +++ b/docs/extras/index.rst @@ -66,6 +66,9 @@ You can check whether local subpixel averaging is turned on:: tidy3d.packaging.tidy3d_extras["use_local_subpixel"] +For a broader overview of configuration options and how they are stored, see +:doc:`../configuration/index`. + Licenses -------- diff --git a/docs/index.rst b/docs/index.rst index 09da6e33bf..6c1fea6439 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -52,9 +52,9 @@ Get Started tidy3d configure --apikey=XXX - And enter your API key when prompted. - - For more detailed installation instructions, see `this page <./install.html>`_. + For more detailed installation instructions, see `this page <./install.html>`_, + and refer to :doc:`configuration/index` if you would like to fine-tune your + settings. .. group-tab:: On Windows |:window:| @@ -78,19 +78,9 @@ Get Started If you're running into trouble, you may need to manually set the API key directly in the configuration file where Tidy3D looks for it. You need to place the ``$HOME/.tidy3d/config`` file in your home directory such as ``C:\Users\username\`` (where ``username`` is your username). - The API key must be in a file called ``$HOME/.tidy3d/config`` located in your home directory, with the following contents - - .. code-block:: bash - - apikey = "XXX" - - You can manually set up your file like this, or do it through the command line line: - - .. code-block:: bash - - echo 'apikey = "XXX"' > ~/.tidy3d/config - - Note the quotes around `XXX`. + The ``tidy3d configure`` command stores the API key for you. If you prefer + to manage the file yourself, see :doc:`configuration/index` for the current + location and format on each platform. .. 
group-tab:: In the Cloud |:cloud:| @@ -249,6 +239,7 @@ Contents :maxdepth: 2 install + configuration/index lectures/index notebooks/docs/index faq/docs/index @@ -260,5 +251,3 @@ Contents About our Solver - - diff --git a/docs/install.rst b/docs/install.rst index 38930ae85f..4f6bf0f5ed 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -78,23 +78,9 @@ Alternatively, the API key can be set up using the environment variable ``SIMCLO export SIMCLOUD_APIKEY="XXX" -Finally, one may manually set the API key directly in the configuration file where Tidy3D looks for it. - -The API key must be in a file called ``.tidy3d/config`` located in your home directory, with the following contents - -.. code-block:: python - - apikey = "XXX" - -You can manually set up your file like this, or do it through the command line line: - -.. code-block:: python - - echo 'apikey = "XXX"' > ~/.tidy3d/config - -Note the quotes around `XXX`. - -Note that Windows users will most likely need to place the ``.tidy3d/config`` file in their ``C:\Users\username\`` directory (where ``username`` is your username). +Finally, one may manually set the API key directly in the configuration file +where Tidy3D looks for it. The path and file format differ slightly between +platforms; see :doc:`configuration/index` for the up-to-date layout. diff --git a/poetry.lock b/poetry.lock index 51835bb611..6e717397e0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "absl-py" @@ -400,18 +400,18 @@ css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "boto3" -version = "1.40.36" +version = "1.40.39" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.40.36-py3-none-any.whl", hash = "sha256:d7c1fe033f491f560cd26022a9dcf28baf877ae854f33bc64fffd0df3b9c98be"}, - {file = "boto3-1.40.36.tar.gz", hash = "sha256:bfc1f3d5c4f5d12b8458406b8972f8794ac57e2da1ee441469e143bc0440a5c3"}, + {file = "boto3-1.40.39-py3-none-any.whl", hash = "sha256:e2cab5606269fe9f428981892aa592b7e0c087a038774475fa4cd6c8b5fe0a99"}, + {file = "boto3-1.40.39.tar.gz", hash = "sha256:27ca06d4d6f838b056b4935c9eceb92c8d125dbe0e895c5583bcf7130627dcd2"}, ] [package.dependencies] -botocore = ">=1.40.36,<1.41.0" +botocore = ">=1.40.39,<1.41.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.14.0,<0.15.0" @@ -420,14 +420,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.40.36" +version = "1.40.39" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.40.36-py3-none-any.whl", hash = "sha256:d6edf75875e4013cb7078875a1d6c289afb4cc6675d99d80700c692d8d8e0b72"}, - {file = "botocore-1.40.36.tar.gz", hash = "sha256:93386a8dc54173267ddfc6cd8636c9171e021f7c032aa1df3af7de816e3df616"}, + {file = "botocore-1.40.39-py3-none-any.whl", hash = "sha256:144e0e887a9fc198c6772f660fc006028bd1a9ce5eea3caddd848db3e421bc79"}, + {file = "botocore-1.40.39.tar.gz", hash = "sha256:c6efc55cac341811ba90c693d20097db6e2ce903451d94496bccd3f672b1709d"}, ] [package.dependencies] @@ -470,7 +470,7 @@ description = "Foreign Function Interface for Python calling C code." 
optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" +markers = "(extra == \"dev\" or extra == \"docs\") and implementation_name == \"pypy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, @@ -683,7 +683,7 @@ description = "Chex: Testing made fun, in JAX!" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "chex-0.1.90-py3-none-any.whl", hash = "sha256:fce3de82588f72d4796e545e574a433aa29229cbdcf792555e41bead24b704ae"}, {file = "chex-0.1.90.tar.gz", hash = "sha256:d3c375aeb6154b08f1cccd2bee4ed83659ee2198a6acf1160d2fe2e4a6c87b5c"}, @@ -805,7 +805,7 @@ description = "Python library for calculating contours of 2D quadrilateral grids optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934"}, {file = "contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989"}, @@ -1220,15 +1220,15 @@ files = [ [[package]] name = "diff-cover" -version = "9.6.0" +version = "9.7.0" description = "Run coverage and linting reports on diffs" optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "diff_cover-9.6.0-py3-none-any.whl", hash = "sha256:29fbeb52d77a0b8c811e5580d5dbf41801a838da2ed54319a599da8f7233c547"}, - {file = "diff_cover-9.6.0.tar.gz", hash = "sha256:75e5bc056dcaa68c6c87c9fb4e07c9e60daef15b6e8d034d56d2da9e2c84a872"}, + 
{file = "diff_cover-9.7.0-py3-none-any.whl", hash = "sha256:64a477c3c01e2da42153b0fae0655052015d3d3b4805f98bd4d25c373c9b911b"}, + {file = "diff_cover-9.7.0.tar.gz", hash = "sha256:8495949f878a29e6e2fc89094db4d67691b10c23487f7e1993d8bc505e91213b"}, ] [package.dependencies] @@ -1341,7 +1341,7 @@ description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main"] -markers = "python_version == \"3.10\" and (extra == \"dev\" or extra == \"docs\")" +markers = "python_version < \"3.11\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, @@ -1427,7 +1427,7 @@ description = "A platform independent file lock." optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\"" +markers = "sys_platform == \"darwin\" and (extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\") or extra == \"dev\" or extra == \"docs\" or extra == \"pytorch\"" files = [ {file = "filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d"}, {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"}, @@ -1440,7 +1440,7 @@ description = "Flax: A neural network library for JAX designed for flexibility" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "flax-0.10.7-py3-none-any.whl", hash = "sha256:4033223a9a9969ba0b252e085e9714d0a1e9124ac300aaf48e92c40769c420f6"}, {file = "flax-0.10.7.tar.gz", hash = "sha256:2930d6671e23076f6db3b96afacf45c5060898f5c189ecab6dda7e05d26c2085"}, @@ -2030,7 +2030,7 @@ description = 
"IPython: Productive Interactive Computing" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\" and (extra == \"dev\" or extra == \"docs\")" +markers = "python_version < \"3.11\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2"}, {file = "ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216"}, @@ -2176,7 +2176,7 @@ description = "Differentiate, compile, and transform Numpy code." optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "jax-0.6.2-py3-none-any.whl", hash = "sha256:bb24a82dc60ccf704dcaf6dbd07d04957f68a6c686db19630dd75260d1fb788c"}, {file = "jax-0.6.2.tar.gz", hash = "sha256:a437d29038cbc8300334119692744704ca7941490867b9665406b7f90665cd96"}, @@ -2240,7 +2240,7 @@ description = "XLA library for JAX" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "jaxlib-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:da4601b2b5dc8c23d6afb293eacfb9aec4e1d1871cb2f29c5a151d103e73b0f8"}, {file = "jaxlib-0.6.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:4205d098ce8efb5f7fe2fe5098bae6036094dc8d8829f5e0e0d7a9b155326336"}, @@ -2352,7 +2352,7 @@ description = "A very fast and expressive template engine." 
optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\"" +markers = "sys_platform == \"darwin\" and (extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\") or extra == \"dev\" or extra == \"docs\" or extra == \"pytorch\"" files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -2682,15 +2682,15 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.4.7" +version = "4.4.8" description = "JupyterLab computational environment" optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"dev\" or extra == \"docs\"" files = [ - {file = "jupyterlab-4.4.7-py3-none-any.whl", hash = "sha256:808bae6136b507a4d18f04254218bfe71ed8ba399a36ef3280d5f259e69abf80"}, - {file = "jupyterlab-4.4.7.tar.gz", hash = "sha256:8c8e225492f4513ebde9bbbc00a05b651ab9a1f5b0013015d96fabf671c37188"}, + {file = "jupyterlab-4.4.8-py3-none-any.whl", hash = "sha256:81b56f33f35be15150e7ccd43440963a93d2b115ffa614a06d38b91e4d650f92"}, + {file = "jupyterlab-4.4.8.tar.gz", hash = "sha256:a89e5a2e9f9295ae039356fc5247e5bfac64936126ab805e3ff8e47f385b0c7e"}, ] [package.dependencies] @@ -2983,7 +2983,7 @@ description = "Safely add untrusted strings to HTML/XML markup." 
optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\"" +markers = "sys_platform == \"darwin\" and (extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\") or extra == \"dev\" or extra == \"docs\" or extra == \"pytorch\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -3536,6 +3536,27 @@ traitlets = ">=5.1" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["pep440", "pre-commit", "pytest", "testpath"] +[[package]] +name = "nbsphinx" +version = "0.9.6" +description = "Jupyter Notebook Tools for Sphinx" +optional = true +python-versions = ">=3.6" +groups = ["main"] +markers = "python_version >= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" +files = [ + {file = "nbsphinx-0.9.6-py3-none-any.whl", hash = "sha256:336b0b557945a7678ec7449b16449f854bc852a435bb53b8a72e6b5dc740d992"}, + {file = "nbsphinx-0.9.6.tar.gz", hash = "sha256:c2b28a2d702f1159a95b843831798e86e60a17fc647b9bff9ba1585355de54e3"}, +] + +[package.dependencies] +docutils = ">=0.18.1" +jinja2 = "*" +nbconvert = ">=5.3,<5.4 || >5.4" +nbformat = "*" +sphinx = ">=1.8" +traitlets = ">=5" + [[package]] name = "nbsphinx" version = "0.9.7" @@ -3543,7 +3564,7 @@ description = "Jupyter Notebook Tools for Sphinx" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" +markers = "python_version < \"3.11\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "nbsphinx-0.9.7-py3-none-any.whl", hash = "sha256:7292c3767fea29e405c60743eee5393682a83982ab202ff98f5eb2db02629da8"}, {file = "nbsphinx-0.9.7.tar.gz", hash = 
"sha256:abd298a686d55fa894ef697c51d44f24e53aa312dadae38e82920f250a5456fe"}, @@ -3576,7 +3597,7 @@ description = "Python package for creating and manipulating graphs and networks" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"dev\" or extra == \"pytorch\" or extra == \"trimesh\" or extra == \"docs\"" +markers = "sys_platform == \"darwin\" and (extra == \"dev\" or extra == \"pytorch\" or extra == \"trimesh\" or extra == \"docs\") or extra == \"dev\" or extra == \"trimesh\" or extra == \"docs\" or extra == \"pytorch\"" files = [ {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, @@ -3653,7 +3674,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}, {file = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}, @@ -4188,7 +4209,7 @@ description = "A decorator to automatically detect mismatch when overriding a me optional = true python-versions = ">=3.6" groups = ["main"] -markers = "python_version < \"3.12\" and (extra == \"dev\" or extra == \"docs\")" +markers = "python_version <= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -4633,7 +4654,7 @@ description = "Run a subprocess in a pseudo terminal" optional 
= true python-versions = "*" groups = ["main"] -markers = "(sys_platform != \"win32\" and sys_platform != \"emscripten\" or os_name != \"nt\") and (extra == \"dev\" or extra == \"docs\")" +markers = "(extra == \"dev\" or extra == \"docs\") and (sys_platform != \"win32\" and sys_platform != \"emscripten\" or os_name != \"nt\")" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -4662,7 +4683,7 @@ description = "C parser in Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"dev\" or extra == \"docs\") and implementation_name != \"PyPy\"" +markers = "(extra == \"dev\" or extra == \"docs\") and implementation_name == \"pypy\"" files = [ {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, @@ -5201,65 +5222,78 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file 
= "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = 
"pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = 
"sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] @@ -5496,6 +5530,23 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.1 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "roman-numerals-py" +version 
= "3.1.0" +description = "Manipulate well-formed Roman numerals" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" +files = [ + {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, + {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, +] + +[package.extras] +lint = ["mypy (==1.15.0)", "pyright (==1.1.394)", "ruff (==0.9.7)"] +test = ["pytest (>=8)"] + [[package]] name = "rpds-py" version = "0.27.1" @@ -5855,7 +5906,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"}, {file = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"}, @@ -6018,7 +6069,7 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa optional = true python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\") or extra == \"dev\" or extra == \"docs\" or python_version >= \"3.12\" and (extra == \"dev\" or extra == \"docs\" or extra == \"pytorch\")" +markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\") or (extra == \"dev\" or extra == \"docs\") and (extra == \"dev\" or extra == \"pytorch\" or extra == \"docs\") or python_version >= \"3.12\" and (extra == 
\"dev\" or extra == \"pytorch\" or extra == \"docs\")" files = [ {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, @@ -6035,53 +6086,69 @@ type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.deve [[package]] name = "shapely" -version = "2.1.1" +version = "2.1.2" description = "Manipulation and analysis of geometric objects" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "shapely-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8ccc872a632acb7bdcb69e5e78df27213f7efd195882668ffba5405497337c6"}, - {file = "shapely-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f24f2ecda1e6c091da64bcbef8dd121380948074875bd1b247b3d17e99407099"}, - {file = "shapely-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45112a5be0b745b49e50f8829ce490eb67fefb0cea8d4f8ac5764bfedaa83d2d"}, - {file = "shapely-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c10ce6f11904d65e9bbb3e41e774903c944e20b3f0b282559885302f52f224a"}, - {file = "shapely-2.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:61168010dfe4e45f956ffbbaf080c88afce199ea81eb1f0ac43230065df320bd"}, - {file = "shapely-2.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cacf067cdff741cd5c56a21c52f54ece4e4dad9d311130493a791997da4a886b"}, - {file = "shapely-2.1.1-cp310-cp310-win32.whl", hash = "sha256:23b8772c3b815e7790fb2eab75a0b3951f435bc0fce7bb146cb064f17d35ab4f"}, - {file = "shapely-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:2c7b2b6143abf4fa77851cef8ef690e03feade9a0d48acd6dc41d9e0e78d7ca6"}, - {file = "shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7"}, - {file = 
"shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea"}, - {file = "shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7"}, - {file = "shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753"}, - {file = "shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647"}, - {file = "shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0"}, - {file = "shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab"}, - {file = "shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93"}, - {file = "shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43"}, - {file = "shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad"}, - {file = "shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9"}, - {file = "shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef"}, - {file = "shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1"}, - {file = "shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d"}, - {file = 
"shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8"}, - {file = "shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a"}, - {file = "shapely-2.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3004a644d9e89e26c20286d5fdc10f41b1744c48ce910bd1867fdff963fe6c48"}, - {file = "shapely-2.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1415146fa12d80a47d13cfad5310b3c8b9c2aa8c14a0c845c9d3d75e77cb54f6"}, - {file = "shapely-2.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21fcab88b7520820ec16d09d6bea68652ca13993c84dffc6129dc3607c95594c"}, - {file = "shapely-2.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5ce6a5cc52c974b291237a96c08c5592e50f066871704fb5b12be2639d9026a"}, - {file = "shapely-2.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:04e4c12a45a1d70aeb266618d8cf81a2de9c4df511b63e105b90bfdfb52146de"}, - {file = "shapely-2.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ca74d851ca5264aae16c2b47e96735579686cb69fa93c4078070a0ec845b8d8"}, - {file = "shapely-2.1.1-cp313-cp313-win32.whl", hash = "sha256:fd9130501bf42ffb7e0695b9ea17a27ae8ce68d50b56b6941c7f9b3d3453bc52"}, - {file = "shapely-2.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:ab8d878687b438a2f4c138ed1a80941c6ab0029e0f4c785ecfe114413b498a97"}, - {file = "shapely-2.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0c062384316a47f776305ed2fa22182717508ffdeb4a56d0ff4087a77b2a0f6d"}, - {file = "shapely-2.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4ecf6c196b896e8f1360cc219ed4eee1c1e5f5883e505d449f263bd053fb8c05"}, - {file = "shapely-2.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb00070b4c4860f6743c600285109c273cca5241e970ad56bb87bef0be1ea3a0"}, - {file = 
"shapely-2.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d14a9afa5fa980fbe7bf63706fdfb8ff588f638f145a1d9dbc18374b5b7de913"}, - {file = "shapely-2.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b640e390dabde790e3fb947198b466e63223e0a9ccd787da5f07bcb14756c28d"}, - {file = "shapely-2.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:69e08bf9697c1b73ec6aa70437db922bafcea7baca131c90c26d59491a9760f9"}, - {file = "shapely-2.1.1-cp313-cp313t-win32.whl", hash = "sha256:ef2d09d5a964cc90c2c18b03566cf918a61c248596998a0301d5b632beadb9db"}, - {file = "shapely-2.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8cb8f17c377260452e9d7720eeaf59082c5f8ea48cf104524d953e5d36d4bdb7"}, - {file = "shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772"}, + {file = "shapely-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7ae48c236c0324b4e139bea88a306a04ca630f49be66741b340729d380d8f52f"}, + {file = "shapely-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eba6710407f1daa8e7602c347dfc94adc02205ec27ed956346190d66579eb9ea"}, + {file = "shapely-2.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ef4a456cc8b7b3d50ccec29642aa4aeda959e9da2fe9540a92754770d5f0cf1f"}, + {file = "shapely-2.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e38a190442aacc67ff9f75ce60aec04893041f16f97d242209106d502486a142"}, + {file = "shapely-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:40d784101f5d06a1fd30b55fc11ea58a61be23f930d934d86f19a180909908a4"}, + {file = "shapely-2.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f6cd5819c50d9bcf921882784586aab34a4bd53e7553e175dece6db513a6f0"}, + {file = "shapely-2.1.2-cp310-cp310-win32.whl", hash = "sha256:fe9627c39c59e553c90f5bc3128252cb85dc3b3be8189710666d2f8bc3a5503e"}, + {file = "shapely-2.1.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:1d0bfb4b8f661b3b4ec3565fa36c340bfb1cda82087199711f86a88647d26b2f"}, + {file = "shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618"}, + {file = "shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d"}, + {file = "shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09"}, + {file = "shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26"}, + {file = "shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7"}, + {file = "shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2"}, + {file = "shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6"}, + {file = "shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc"}, + {file = "shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94"}, + {file = "shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359"}, + {file = "shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3"}, + {file = "shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b"}, + {file = "shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc"}, + {file = "shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d"}, + {file = "shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454"}, + {file = "shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179"}, + {file = "shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8"}, + {file = "shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a"}, + {file = "shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e"}, + {file = "shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6"}, + {file = "shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af"}, + {file = "shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd"}, + {file = "shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350"}, + {file = "shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715"}, + {file = "shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40"}, + {file = "shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b"}, + {file = 
"shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801"}, + {file = "shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0"}, + {file = "shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c"}, + {file = "shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99"}, + {file = "shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf"}, + {file = "shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c"}, + {file = "shapely-2.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223"}, + {file = "shapely-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c"}, + {file = "shapely-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df"}, + {file = "shapely-2.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf"}, + {file = "shapely-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4"}, + {file = "shapely-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc"}, + {file = "shapely-2.1.2-cp314-cp314-win32.whl", hash = "sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566"}, + {file = 
"shapely-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c"}, + {file = "shapely-2.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a"}, + {file = "shapely-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076"}, + {file = "shapely-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1"}, + {file = "shapely-2.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0"}, + {file = "shapely-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26"}, + {file = "shapely-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0"}, + {file = "shapely-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735"}, + {file = "shapely-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9"}, + {file = "shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9"}, ] [package.dependencies] @@ -6304,7 +6371,7 @@ description = "Python documentation generator" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" +markers = "python_version < \"3.11\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2"}, {file = "sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927"}, @@ -6334,6 
+6401,43 @@ docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=6.0)", "mypy (==1.11.1)", "pyright (==1.1.384)", "pytest (>=6.0)", "ruff (==0.6.9)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.18.0.20240506)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241005)", "types-requests (==2.32.0.20240914)", "types-urllib3 (==1.26.25.14)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] +[[package]] +name = "sphinx" +version = "8.2.3" +description = "Python documentation generator" +optional = true +python-versions = ">=3.11" +groups = ["main"] +markers = "python_version >= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" +files = [ + {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, + {file = "sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, +] + +[package.dependencies] +alabaster = ">=0.7.14" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +roman-numerals-py = ">=1.0.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = ">=1.0.7" +sphinxcontrib-devhelp = ">=1.0.6" +sphinxcontrib-htmlhelp = ">=2.0.6" +sphinxcontrib-jsmath = ">=1.0.1" +sphinxcontrib-qthelp = ">=1.0.6" +sphinxcontrib-serializinghtml = ">=1.1.9" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.395)", "pytest (>=8.0)", "ruff (==0.9.9)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", 
"types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "pytest-xdist[psutil] (>=3.4)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + [[package]] name = "sphinx-book-theme" version = "1.1.4" @@ -6804,7 +6908,7 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version == \"3.10\" and (extra == \"dev\" or extra == \"docs\")" +markers = "python_version < \"3.11\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -6844,10 +6948,9 @@ files = [ name = "tomlkit" version = "0.13.3" description = "Style preserving TOML library" -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" files = [ {file = "tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0"}, {file = "tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1"}, @@ -6872,7 +6975,7 @@ description = "Tensors and Dynamic neural networks in Python with strong GPU acc optional = true python-versions = ">=3.9.0" groups = ["main"] -markers = "(extra == \"dev\" or extra == \"pytorch\") and sys_platform == \"darwin\"" +markers = "sys_platform == \"darwin\" and (extra == \"dev\" or extra == \"pytorch\")" files = [ {file = "torch-2.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0be92c08b44009d4131d1ff7a8060d10bafdb7ddcb7359ef8d8c5169007ea905"}, {file = "torch-2.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:89aa9ee820bb39d4d72b794345cccef106b574508dd17dbec457949678c76011"}, @@ -6936,7 +7039,7 @@ description = "Tensors and Dynamic neural networks in Python with strong GPU acc optional = true python-versions = ">=3.9.0" groups = ["main"] -markers = "(extra == \"dev\" or extra == \"pytorch\") and sys_platform != \"darwin\"" +markers = "sys_platform != \"darwin\" and (extra == \"dev\" or extra == \"pytorch\")" files = [ {file = "torch-2.8.0+cpu-cp310-cp310-linux_s390x.whl", hash = "sha256:5d255d259fbc65439b671580e40fdb8faea4644761b64fed90d6904ffe71bbc1"}, {file = "torch-2.8.0+cpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b2149858b8340aeeb1f3056e0bff5b82b96e43b596fe49a9dba3184522261213"}, @@ -7394,7 +7497,7 @@ description = "N-D labeled arrays and datasets in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "xarray-2025.6.1-py3-none-any.whl", hash = "sha256:8b988b47f67a383bdc3b04c5db475cd165e580134c1f1943d52aee4a9c97651b"}, {file = "xarray-2025.6.1.tar.gz", hash = "sha256:a84f3f07544634a130d7dc615ae44175419f4c77957a7255161ed99c69c7c8b0"}, @@ -7476,4 +7579,4 @@ vtk = ["vtk"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "591568dd5f68370208830b200214d40918ff9e5a85b8c17a44c86a15b867e3b2" +content-hash = "e2491549c5f7dfd486e055811aff518773929057156e7c1bbd07146283be21c0" diff --git a/pyproject.toml b/pyproject.toml index 06fdf22247..a6407e344a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ pydantic = "^2.0" PyYAML = "*" dask = "*" toml = "*" +tomlkit = "^0.13.2" autograd = ">=1.7.0" scipy = "*" ### NOT CORE diff --git a/tests/config/conftest.py b/tests/config/conftest.py new file mode 100644 index 0000000000..3d0f065eeb --- /dev/null +++ b/tests/config/conftest.py @@ -0,0 +1,53 @@ +"""Shared fixtures for the configuration test suite.""" + +from __future__ import annotations + +import os 
+ +import pytest + +from tidy3d.config.__init__ import get_manager, reload_config + +_ENV_VARS_TO_CLEAR = { + "TIDY3D_PROFILE", + "TIDY3D_CONFIG_PROFILE", + "TIDY3D_ENV", + "SIMCLOUD_APIKEY", + "TIDY3D_AUTH__APIKEY", + "TIDY3D_WEB__APIKEY", + "TIDY3D_BASE_DIR", +} + + +@pytest.fixture(autouse=True) +def clean_env(monkeypatch): + """Ensure configuration-related env vars do not leak between tests.""" + + original: dict[str, str | None] = {var: os.environ.get(var) for var in _ENV_VARS_TO_CLEAR} + for var in _ENV_VARS_TO_CLEAR: + monkeypatch.delenv(var, raising=False) + try: + yield + finally: + for var, value in original.items(): + if value is None: + monkeypatch.delenv(var, raising=False) + else: + monkeypatch.setenv(var, value) + + +@pytest.fixture +def mock_config_dir(tmp_path, monkeypatch): + """Point the config system at a temporary directory.""" + + base_dir = tmp_path / "config_home" + monkeypatch.setenv("TIDY3D_BASE_DIR", str(base_dir)) + return base_dir / ".tidy3d" + + +@pytest.fixture +def config_manager(mock_config_dir): + """Return a freshly initialized configuration manager.""" + + reload_config(profile="default") + return get_manager() diff --git a/tests/config/test_legacy.py b/tests/config/test_legacy.py new file mode 100644 index 0000000000..a2ab59ab5d --- /dev/null +++ b/tests/config/test_legacy.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +import importlib + +from tidy3d.config.__init__ import get_manager, reload_config + + +def test_legacy_logging_level(config_manager): + cfg = reload_config(profile=config_manager.profile) + cfg.logging_level = "DEBUG" + manager = get_manager() + assert manager.get_section("logging").level == "DEBUG" + + +def test_env_switch(config_manager): + config_module = importlib.import_module("tidy3d.config.__init__") + config_module.Env.dev.active() + assert get_manager().profile == "dev" + config_module.Env.set_current(config_module.Env.prod) + assert get_manager().profile == "prod" diff --git 
a/tests/config/test_loader.py b/tests/config/test_loader.py new file mode 100644 index 0000000000..f45573cb6b --- /dev/null +++ b/tests/config/test_loader.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +from pathlib import Path + +from click.testing import CliRunner +from pydantic import Field + +from tidy3d.config import get_manager, reload_config +from tidy3d.config import registry as config_registry +from tidy3d.config.sections import ConfigSection +from tidy3d.web.cli.app import tidy3d_cli + + +def _config_path(config_dir: Path) -> Path: + return config_dir / "config.toml" + + +def test_save_includes_descriptions(config_manager, mock_config_dir): + manager = config_manager + manager.save(include_defaults=True) + + content = _config_path(mock_config_dir).read_text(encoding="utf-8") + assert "# Web/HTTP configuration." in content + + +def test_preserves_user_comments(config_manager, mock_config_dir): + manager = config_manager + manager.save(include_defaults=True) + + config_path = _config_path(mock_config_dir) + text = config_path.read_text(encoding="utf-8") + text = text.replace( + "Web/HTTP configuration.", + "user-modified comment", + ) + config_path.write_text(text, encoding="utf-8") + + reload_config(profile="default") + manager = get_manager() + manager.save(include_defaults=True) + + updated = config_path.read_text(encoding="utf-8") + assert "user-modified comment" in updated + assert "Web/HTTP configuration." 
not in updated + + +def test_profile_preserves_comments(config_manager, mock_config_dir): + @config_registry.register_plugin("profile_comment") + class ProfileComment(ConfigSection): + """Profile comment plugin.""" + + knob: int = Field( + 1, + description="Profile knob description.", + json_schema_extra={"persist": True}, + ) + + try: + manager = config_manager + manager.switch_profile("custom") + manager.update_section("plugins.profile_comment", knob=5) + manager.save() + + profile_path = mock_config_dir / "profiles" / "custom.toml" + text = profile_path.read_text(encoding="utf-8") + assert "Profile knob description." in text + text = text.replace("Profile knob description.", "user comment") + profile_path.write_text(text, encoding="utf-8") + + manager.update_section("plugins.profile_comment", knob=7) + manager.save() + + updated = profile_path.read_text(encoding="utf-8") + assert "user comment" in updated + assert "Profile knob description." not in updated + finally: + config_registry._SECTIONS.pop("plugins.profile_comment", None) + reload_config(profile="default") + + +def test_cli_reset_config(mock_config_dir): + @config_registry.register_plugin("cli_comment") + class CLIPlugin(ConfigSection): + """CLI plugin configuration.""" + + knob: int = Field( + 3, + description="CLI knob description.", + json_schema_extra={"persist": True}, + ) + + try: + reload_config(profile="default") + manager = get_manager() + manager.update_section("web", apikey="secret") + manager.save(include_defaults=True) + manager.switch_profile("custom") + manager.update_section("plugins.cli_comment", knob=42) + manager.save() + + profiles_dir = mock_config_dir / "profiles" + assert profiles_dir.exists() + + runner = CliRunner() + result = runner.invoke(tidy3d_cli, ["config-reset", "--yes"]) + assert result.exit_code == 0, result.output + + config_text = _config_path(mock_config_dir).read_text(encoding="utf-8") + assert "Web/HTTP configuration." 
in config_text + assert "[web]" in config_text + assert "secret" not in config_text + assert not profiles_dir.exists() + finally: + config_registry._SECTIONS.pop("plugins.cli_comment", None) + reload_config(profile="default") + + +def test_plugin_descriptions(mock_config_dir): + @config_registry.register_plugin("comment_test") + class CommentPlugin(ConfigSection): + """Comment plugin configuration.""" + + knob: int = Field( + 3, + description="Plugin knob description.", + json_schema_extra={"persist": True}, + ) + + try: + reload_config(profile="default") + manager = get_manager() + manager.save(include_defaults=True) + content = _config_path(mock_config_dir).read_text(encoding="utf-8") + assert "Comment plugin configuration." in content + assert "Plugin knob description." in content + finally: + config_registry._SECTIONS.pop("plugins.comment_test", None) + reload_config(profile="default") diff --git a/tests/config/test_manager.py b/tests/config/test_manager.py new file mode 100644 index 0000000000..74275bce8d --- /dev/null +++ b/tests/config/test_manager.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +import numpy as np +import pytest + +from tidy3d.config import Env, get_manager, reload_config + + +def test_default_web_settings(config_manager): + web = config_manager.get_section("web") + assert str(web.api_endpoint) == "https://tidy3d-api.simulation.cloud" + assert str(web.website_endpoint) == "https://tidy3d.simulation.cloud" + assert web.ssl_verify is True + + +def test_update_section_runtime_overlay(config_manager): + config_manager.update_section("logging", level="DEBUG", suppression=False) + logging_section = config_manager.get_section("logging") + assert logging_section.level == "DEBUG" + assert logging_section.suppression is False + + +def test_runtime_isolated_per_profile(config_manager): + config_manager.update_section("web", timeout=45) + config_manager.switch_profile("customer") + assert config_manager.get_section("web").timeout == 120 + 
config_manager.switch_profile("default") + assert config_manager.get_section("web").timeout == 45 + + +def test_environment_variable_precedence(monkeypatch, config_manager): + monkeypatch.setenv("TIDY3D_LOGGING__LEVEL", "WARNING") + config_manager.switch_profile(config_manager.profile) + config_manager.update_section("logging", level="DEBUG") + logging_section = config_manager.get_section("logging") + # env var should still take precedence + assert logging_section.level == "WARNING" + + +@pytest.mark.parametrize("profile", ["dev", "uat"]) +def test_builtin_profiles(profile, config_manager): + config_manager.switch_profile(profile) + web = config_manager.get_section("web") + assert web.s3_region is not None + + +def test_uppercase_profile_normalization(monkeypatch): + monkeypatch.setenv("TIDY3D_ENV", "DEV") + try: + reload_config() + manager = get_manager() + assert manager.profile == "dev" + web = manager.get_section("web") + assert str(web.api_endpoint) == "https://tidy3d-api.dev-simulation.cloud" + assert Env.current.name == "dev" + finally: + reload_config(profile="default") + + +def test_autograd_defaults(config_manager): + autograd = config_manager.get_section("autograd") + assert autograd.min_wvl_fraction == pytest.approx(5e-2) + assert autograd.points_per_wavelength == 10 + assert autograd.monitor_interval_poly == (1, 1, 1) + assert autograd.quadrature_sample_fraction == pytest.approx(0.4) + assert autograd.gauss_quadrature_order == 7 + assert autograd.edge_clip_tolerance == pytest.approx(1e-9) + assert autograd.minimum_spacing_fraction == pytest.approx(1e-2) + assert autograd.gradient_precision == "single" + assert autograd.max_traced_structures == 500 + assert autograd.max_adjoint_per_fwd == 10 + + +def test_autograd_update_section(config_manager): + config_manager.update_section( + "autograd", + min_wvl_fraction=0.08, + points_per_wavelength=12, + solver_freq_chunk_size=3, + gradient_precision="double", + minimum_spacing_fraction=0.02, + 
gauss_quadrature_order=5, + edge_clip_tolerance=2e-9, + max_traced_structures=600, + max_adjoint_per_fwd=7, + ) + autograd = config_manager.get_section("autograd") + assert autograd.min_wvl_fraction == pytest.approx(0.08) + assert autograd.points_per_wavelength == 12 + assert autograd.solver_freq_chunk_size == 3 + assert autograd.gauss_quadrature_order == 5 + assert autograd.edge_clip_tolerance == pytest.approx(2e-9) + assert autograd.minimum_spacing_fraction == pytest.approx(0.02) + assert autograd.gradient_precision == "double" + assert autograd.max_traced_structures == 600 + assert autograd.max_adjoint_per_fwd == 7 + + assert autograd.gradient_dtype_float is np.float64 + assert autograd.gradient_dtype_complex is np.complex128 diff --git a/tests/config/test_plugins.py b/tests/config/test_plugins.py new file mode 100644 index 0000000000..afa0cb603b --- /dev/null +++ b/tests/config/test_plugins.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +import toml +from pydantic import Field + +from tidy3d.config.__init__ import get_manager, reload_config +from tidy3d.config.registry import get_sections, register_plugin +from tidy3d.config.sections import ConfigSection + + +def ensure_dummy_plugin(): + if "plugins.dummy" in get_sections(): + return + + @register_plugin("dummy") + class DummyPlugin(ConfigSection): + enabled: bool = Field(False, json_schema_extra={"persist": True}) + precision: int = Field(1, json_schema_extra={"persist": True}) + + +def test_plugin_defaults_available(mock_config_dir): + ensure_dummy_plugin() + assert "plugins.dummy" in get_sections() + reload_config(profile="default") + manager = get_manager() + plugin = manager.plugins.dummy + assert plugin.enabled is False + assert plugin.precision == 1 + + +def test_plugin_updates_persist(mock_config_dir): + ensure_dummy_plugin() + assert "plugins.dummy" in get_sections() + reload_config(profile="default") + manager = get_manager() + manager.update_section("plugins.dummy", enabled=True, 
precision=4) + manager.save() + config_path = manager.config_dir / "config.toml" + assert config_path.exists() + contents = toml.load(config_path) + + reload_config(profile=manager.profile) + new_manager = get_manager() + plugin = new_manager.plugins.dummy + assert plugin.enabled is True + assert plugin.precision == 4 diff --git a/tests/config/test_profiles.py b/tests/config/test_profiles.py new file mode 100644 index 0000000000..c102ea663a --- /dev/null +++ b/tests/config/test_profiles.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +import toml + + +def test_save_default_profile(config_manager): + config_manager.update_section("web", apikey="token") + config_manager.update_section("web", timeout=30) + config_manager.save() + + config_path = config_manager.config_dir / "config.toml" + assert config_path.exists() + data = toml.load(config_path) + assert data["web"]["apikey"] == "token" + + +def test_save_custom_profile(config_manager): + config_manager.switch_profile("customer") + config_manager.update_section("logging", level="DEBUG") + config_manager.save() + + profile_path = config_manager.config_dir / "profiles" / "customer.toml" + assert profile_path.exists() + data = toml.load(profile_path) + assert data["logging"]["level"] == "DEBUG" diff --git a/tests/test_components/autograd/test_autograd.py b/tests/test_components/autograd/test_autograd.py index cfd55079b0..7042aa33ed 100644 --- a/tests/test_components/autograd/test_autograd.py +++ b/tests/test_components/autograd/test_autograd.py @@ -19,14 +19,10 @@ import tidy3d as td import tidy3d.web as web -from tidy3d.components.autograd.constants import ( - MAX_NUM_TRACED_STRUCTURES, - MIN_WVL_FRACTION_CYLINDER_DISCRETIZE, - MINIMUM_SPACING_FRACTION, -) from tidy3d.components.autograd.derivative_utils import DerivativeInfo from tidy3d.components.autograd.utils import is_tidy_box from tidy3d.components.data.data_array import DataArray +from tidy3d.config import config from tidy3d.exceptions import 
AdjointError from tidy3d.plugins.polyslab import ComplexPolySlab from tidy3d.web import run, run_async @@ -1251,7 +1247,8 @@ def test_too_many_traced_structures(monkeypatch, use_emulated_run): def make_sim(*args): structure = make_structures(*args)[structure_key] return SIM_BASE.updated_copy( - structures=(MAX_NUM_TRACED_STRUCTURES + 1) * [structure], monitors=[monitor] + structures=(config.autograd.max_traced_structures + 1) * [structure], + monitors=[monitor], ) def objective(*args): @@ -1774,7 +1771,7 @@ def test_adaptive_spacing(eps_real): ) with AssertLogLevel("WARNING", contains_str="Based on the material, the adaptive spacing"): - expected_vjp_spacing = info.wavelength_min * MINIMUM_SPACING_FRACTION + expected_vjp_spacing = info.wavelength_min * config.autograd.minimum_spacing_fraction vjp_spacing = info.adaptive_vjp_spacing() assert np.isclose(expected_vjp_spacing, vjp_spacing), "Unexpected adaptive vjp spacing!" @@ -1808,7 +1805,7 @@ def test_cylinder_discretization(eps_real): ): cylinder = td.Cylinder(axis=2, length=info.wavelength_min, radius=2 * info.wavelength_min) - expected_wvl_mat = info.wavelength_min * MIN_WVL_FRACTION_CYLINDER_DISCRETIZE + expected_wvl_mat = info.wavelength_min * config.autograd.min_wvl_fraction wvl_mat = cylinder._discretization_wavelength(derivative_info=info) assert np.isclose(expected_wvl_mat, wvl_mat), ( diff --git a/tidy3d/components/autograd/constants.py b/tidy3d/components/autograd/constants.py deleted file mode 100644 index dcf252cde8..0000000000 --- a/tidy3d/components/autograd/constants.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import annotations - -import numpy as np - -# minimum fraction of minimum free space wavelength for discretizing cylinder in autograd derivative -MIN_WVL_FRACTION_CYLINDER_DISCRETIZE = 5e-2 -# default number of points per wvl in material for discretizing cylinder in autograd derivative -PTS_PER_WVL_MAT_CYLINDER_DISCRETIZE = 10 - -MAX_NUM_TRACED_STRUCTURES = 500 -MAX_NUM_ADJOINT_PER_FWD 
= 10 - -GRADIENT_PRECISION = "single" # Options: "single", "double" -GRADIENT_DTYPE_FLOAT = np.float32 if GRADIENT_PRECISION == "single" else np.float64 -GRADIENT_DTYPE_COMPLEX = np.complex64 if GRADIENT_PRECISION == "single" else np.complex128 - -GAUSS_QUADRATURE_ORDER = 7 -QUAD_SAMPLE_FRACTION = 0.4 - -AUTOGRAD_MONITOR_INTERVAL_SPACE_POLY = (1, 1, 1) -AUTOGRAD_MONITOR_INTERVAL_SPACE_CUSTOM = (1, 1, 1) - -DEFAULT_WAVELENGTH_FRACTION = 0.1 -# minimum fraction of minimum free space wavelength to be used when computing adaptive spacing -MINIMUM_SPACING_FRACTION = 1e-2 - -EDGE_CLIP_TOLERANCE = 1e-9 - -# chunk size for processing multiple frequencies in adjoint gradient computation. -# None = process all frequencies at once (no chunking) -ADJOINT_FREQ_CHUNK_SIZE = None diff --git a/tidy3d/components/autograd/derivative_utils.py b/tidy3d/components/autograd/derivative_utils.py index 9ecbe15f03..7c96b7d566 100644 --- a/tidy3d/components/autograd/derivative_utils.py +++ b/tidy3d/components/autograd/derivative_utils.py @@ -10,15 +10,10 @@ from tidy3d.components.data.data_array import FreqDataArray, ScalarFieldDataArray from tidy3d.components.types import ArrayLike, Bound, tidycomplex +from tidy3d.config import config from tidy3d.constants import C_0, EPSILON_0, LARGE_NUMBER, MU_0 from tidy3d.log import log -from .constants import ( - DEFAULT_WAVELENGTH_FRACTION, - GRADIENT_DTYPE_COMPLEX, - GRADIENT_DTYPE_FLOAT, - MINIMUM_SPACING_FRACTION, -) from .types import PathType from .utils import get_static @@ -212,12 +207,16 @@ def _evaluate_with_interpolators( dict[str, np.ndarray] Dictionary mapping component names to field values at coordinates. 
""" + auto_cfg = config.autograd + float_dtype = auto_cfg.gradient_dtype_float + complex_dtype = auto_cfg.gradient_dtype_complex + coords = DerivativeInfo._nan_to_num_if_needed(coords) - if coords.dtype != GRADIENT_DTYPE_FLOAT and coords.dtype != GRADIENT_DTYPE_COMPLEX: - coords = coords.astype(GRADIENT_DTYPE_FLOAT, copy=False) + if coords.dtype != float_dtype and coords.dtype != complex_dtype: + coords = coords.astype(float_dtype, copy=False) return {name: interp(coords) for name, interp in interpolators.items()} - def create_interpolators(self, dtype=GRADIENT_DTYPE_FLOAT) -> dict: + def create_interpolators(self, dtype: Optional[np.dtype] = None) -> dict: """Create interpolators for field components and permittivity data. Creates and caches ``RegularGridInterpolator`` objects for all field components @@ -227,8 +226,9 @@ def create_interpolators(self, dtype=GRADIENT_DTYPE_FLOAT) -> dict: Parameters ---------- - dtype : np.dtype = GRADIENT_DTYPE_FLOAT - Data type for interpolation coordinates and values. + dtype : np.dtype, optional + Data type for interpolation coordinates and values. Defaults to the + current ``config.autograd.gradient_dtype_float``. 
Returns ------- @@ -239,6 +239,11 @@ def create_interpolators(self, dtype=GRADIENT_DTYPE_FLOAT) -> dict: """ from scipy.interpolate import RegularGridInterpolator + auto_cfg = config.autograd + if dtype is None: + dtype = auto_cfg.gradient_dtype_float + complex_dtype = auto_cfg.gradient_dtype_complex + cache_key = str(dtype) if cache_key in self._interpolators_cache: return self._interpolators_cache[cache_key] @@ -261,7 +266,7 @@ def _make_lazy_interpolator_group(field_data_dict, group_key, is_field_group=Tru def creator_func(arr=arr, points=points): data = arr.data.astype( - GRADIENT_DTYPE_COMPLEX if np.iscomplexobj(arr.data) else dtype, copy=False + complex_dtype if np.iscomplexobj(arr.data) else dtype, copy=False ) # create interpolator with frequency dimension if "f" in arr.dims: @@ -711,8 +716,8 @@ def _project_in_basis( def adaptive_vjp_spacing( self, - wl_fraction: float = DEFAULT_WAVELENGTH_FRACTION, - min_allowed_spacing_fraction: float = MINIMUM_SPACING_FRACTION, + wl_fraction: Optional[float] = None, + min_allowed_spacing_fraction: Optional[float] = None, ) -> float: """Compute adaptive spacing for finite-difference gradient evaluation. @@ -721,17 +726,27 @@ def adaptive_vjp_spacing( Parameters ---------- - wl_fraction : float = 0.1 - Fraction of wavelength/skin depth to use as spacing. - min_allowed_spacing_fraction : float = 1e-2 - Minimum allowed spacing fraction of free space wavelength to - prevent numerical issues. + wl_fraction : float, optional + Fraction of wavelength/skin depth to use as spacing. Defaults to the configured + ``autograd.default_wavelength_fraction`` when ``None``. + min_allowed_spacing_fraction : float, optional + Minimum allowed spacing fraction of free space wavelength used to + prevent numerical issues. Defaults to ``config.autograd.minimum_spacing_fraction`` + when not specified. Returns ------- float Adaptive spacing value for gradient evaluation. 
""" + if wl_fraction is None or min_allowed_spacing_fraction is None: + from tidy3d.config import config + + if wl_fraction is None: + wl_fraction = config.autograd.default_wavelength_fraction + if min_allowed_spacing_fraction is None: + min_allowed_spacing_fraction = config.autograd.minimum_spacing_fraction + # handle FreqDataArray or scalar eps_in if isinstance(self.eps_in, FreqDataArray): eps_real = np.asarray(self.eps_in.values, dtype=np.complex128).real diff --git a/tidy3d/components/geometry/base.py b/tidy3d/components/geometry/base.py index 09feaa59d5..96059482e9 100644 --- a/tidy3d/components/geometry/base.py +++ b/tidy3d/components/geometry/base.py @@ -25,7 +25,6 @@ TracedSize, get_static, ) -from tidy3d.components.autograd.constants import GRADIENT_DTYPE_FLOAT from tidy3d.components.autograd.derivative_utils import ( DerivativeInfo, FieldData, @@ -2684,9 +2683,6 @@ def _derivative_face_dielectric( Parameters ---------- - dtype : np.dtype = GRADIENT_DTYPE_FLOAT - Data type for interpolation coordinates and values. - dim_normal : str Surface normal of the face coord_normal_face : float @@ -2757,9 +2753,6 @@ def _derivative_face_pec( Parameters ---------- - dtype : np.dtype = GRADIENT_DTYPE_FLOAT - Data type for interpolation coordinates and values. 
- dim_normal : str Surface normal of the face axis_normal : Axis @@ -3612,9 +3605,7 @@ def _compute_derivatives(self, derivative_info: DerivativeInfo) -> AutogradField grad_vjps = {} # create interpolators once for all geometries to avoid redundant field data conversions - interpolators = derivative_info.interpolators or derivative_info.create_interpolators( - dtype=GRADIENT_DTYPE_FLOAT - ) + interpolators = derivative_info.interpolators or derivative_info.create_interpolators() for field_path in derivative_info.paths: _, index, *geo_path = field_path diff --git a/tidy3d/components/geometry/polyslab.py b/tidy3d/components/geometry/polyslab.py index 9312fe9149..422b672185 100644 --- a/tidy3d/components/geometry/polyslab.py +++ b/tidy3d/components/geometry/polyslab.py @@ -14,12 +14,6 @@ from numpy.polynomial.legendre import leggauss as _leggauss from tidy3d.components.autograd import AutogradFieldMap, TracedVertices, get_static -from tidy3d.components.autograd.constants import ( - EDGE_CLIP_TOLERANCE, - GAUSS_QUADRATURE_ORDER, - GRADIENT_DTYPE_FLOAT, - QUAD_SAMPLE_FRACTION, -) from tidy3d.components.autograd.derivative_utils import DerivativeInfo from tidy3d.components.autograd.types import TracedFloat from tidy3d.components.base import cached_property, skip_if_fields_missing @@ -35,6 +29,7 @@ PlanePosition, Shapely, ) +from tidy3d.config import config from tidy3d.constants import LARGE_NUMBER, MICROMETER, fp_eps from tidy3d.exceptions import SetupError, Tidy3dImportError, ValidationError from tidy3d.log import log @@ -61,7 +56,9 @@ def leggauss(n): """Cached version of leggauss with dtype conversions.""" g, w = _leggauss(n) - return g.astype(GRADIENT_DTYPE_FLOAT, copy=False), w.astype(GRADIENT_DTYPE_FLOAT, copy=False) + return g.astype(config.autograd.gradient_dtype_float, copy=False), w.astype( + config.autograd.gradient_dtype_float, copy=False + ) class PolySlab(base.Planar): @@ -1163,7 +1160,9 @@ def _edge_events_detection( # sample at a few dilation values 
dist_list = ( dilation - * np.linspace(0, 1, 1 + _N_SAMPLE_POLYGON_INTERSECT, dtype=GRADIENT_DTYPE_FLOAT)[1:] + * np.linspace( + 0, 1, 1 + _N_SAMPLE_POLYGON_INTERSECT, dtype=config.autograd.gradient_dtype_float + )[1:] ) for dist in dist_list: # offset: we offset the vertices first, and then use shapely to make it proper @@ -1470,7 +1469,7 @@ def _compute_derivatives(self, derivative_info: DerivativeInfo) -> AutogradField # create interpolators once for ALL derivative computations # use provided interpolators if available to avoid redundant field data conversions interpolators = derivative_info.interpolators or derivative_info.create_interpolators( - dtype=GRADIENT_DTYPE_FLOAT + dtype=config.autograd.gradient_dtype_float ) for path in derivative_info.paths: @@ -1514,17 +1513,19 @@ def _z_slices(self, sim_min: np.ndarray, sim_max: np.ndarray, is_2d: bool, dx: f Returns (z_centers, dz, z0, z1). For 2D, returns single center and dz=1. """ if is_2d: - zc = np.array([self.center_axis], dtype=GRADIENT_DTYPE_FLOAT) + zc = np.array([self.center_axis], dtype=config.autograd.gradient_dtype_float) return zc, 1.0, self.center_axis, self.center_axis z0 = max(self.slab_bounds[0], sim_min[self.axis]) z1 = min(self.slab_bounds[1], sim_max[self.axis]) if z1 <= z0: - return np.array([], dtype=GRADIENT_DTYPE_FLOAT), 0.0, z0, z1 + return np.array([], dtype=config.autograd.gradient_dtype_float), 0.0, z0, z1 n_z = max(1, int(np.ceil((z1 - z0) / dx))) dz = (z1 - z0) / n_z - z_centers = np.linspace(z0 + dz / 2, z1 - dz / 2, n_z, dtype=GRADIENT_DTYPE_FLOAT) + z_centers = np.linspace( + z0 + dz / 2, z1 - dz / 2, n_z, dtype=config.autograd.gradient_dtype_float + ) return z_centers, dz, z0, z1 @staticmethod @@ -1533,13 +1534,14 @@ def _clip_edge_to_bounds_t( ) -> Optional[tuple[float, float]]: """Parametric bounds [t0,t1] of segment within [sim_min, sim_max].""" t_start, t_end = 0.0, 1.0 + edge_clip_tolerance = config.autograd.edge_clip_tolerance for dim in range(3): v0_d, v1_d = v0_3d[dim], 
v1_3d[dim] min_d, max_d = sim_min[dim], sim_max[dim] if np.isclose(v0_d, v1_d): - if v0_d < (min_d - EDGE_CLIP_TOLERANCE) or v0_d > (max_d + EDGE_CLIP_TOLERANCE): + if v0_d < (min_d - edge_clip_tolerance) or v0_d > (max_d + edge_clip_tolerance): return None continue @@ -1553,7 +1555,7 @@ def _clip_edge_to_bounds_t( if t_start >= t_end: return None - if t_end <= t_start + EDGE_CLIP_TOLERANCE: + if t_end <= t_start + edge_clip_tolerance: return None return (t_start, t_end) @@ -1563,26 +1565,34 @@ def _adaptive_edge_samples(L: float, dx: float, t_start: float = 0.0, t_end: flo """Gauss samples and weights along [t_start,t_end] with adaptive count.""" L_eff = L * max(0.0, t_end - t_start) n_uniform = max(1, int(np.ceil(L_eff / dx))) - n_gauss = n_uniform if n_uniform <= 3 else max(2, int(n_uniform * QUAD_SAMPLE_FRACTION)) - if n_gauss <= GAUSS_QUADRATURE_ORDER: + sample_fraction = config.autograd.quadrature_sample_fraction + gauss_quadrature_order = config.autograd.gauss_quadrature_order + n_gauss = n_uniform if n_uniform <= 3 else max(2, int(n_uniform * sample_fraction)) + if n_gauss <= gauss_quadrature_order: g, w = leggauss(n_gauss) s = (0.5 * (t_end - t_start) * g + 0.5 * (t_end + t_start)).astype( - GRADIENT_DTYPE_FLOAT, copy=False + config.autograd.gradient_dtype_float, copy=False + ) + wt = (w * 0.5 * (t_end - t_start)).astype( + config.autograd.gradient_dtype_float, copy=False ) - wt = (w * 0.5 * (t_end - t_start)).astype(GRADIENT_DTYPE_FLOAT, copy=False) return s, wt # composite Gauss with fixed local order - g_loc, w_loc = leggauss(GAUSS_QUADRATURE_ORDER) + g_loc, w_loc = leggauss(gauss_quadrature_order) segs = n_uniform - edges_t = np.linspace(t_start, t_end, segs + 1, dtype=GRADIENT_DTYPE_FLOAT) + edges_t = np.linspace(t_start, t_end, segs + 1, dtype=config.autograd.gradient_dtype_float) S, W = [], [] for i in range(segs): a, b = edges_t[i], edges_t[i + 1] S.append( - (0.5 * (b - a) * g_loc + 0.5 * (b + a)).astype(GRADIENT_DTYPE_FLOAT, copy=False) + (0.5 * 
(b - a) * g_loc + 0.5 * (b + a)).astype( + config.autograd.gradient_dtype_float, copy=False + ) + ) + W.append( + (w_loc * 0.5 * (b - a)).astype(config.autograd.gradient_dtype_float, copy=False) ) - W.append((w_loc * 0.5 * (b - a)).astype(GRADIENT_DTYPE_FLOAT, copy=False)) return np.concatenate(S), np.concatenate(W) def _collect_sidewall_patches( @@ -1619,7 +1629,7 @@ def _collect_sidewall_patches( dprime = -tan_th # dd/dz # axis unit vector in 3D - axis_vec = np.zeros(3, dtype=GRADIENT_DTYPE_FLOAT) + axis_vec = np.zeros(3, dtype=config.autograd.gradient_dtype_float) axis_vec[self.axis] = 1.0 # densify along axis as |theta| grows: dz scales with cos(theta) @@ -1628,14 +1638,14 @@ def _collect_sidewall_patches( # early exit: no slices if (not is_2d) and len(z_centers) == 0: return { - "centers": np.empty((0, 3), dtype=GRADIENT_DTYPE_FLOAT), - "normals": np.empty((0, 3), dtype=GRADIENT_DTYPE_FLOAT), - "perps1": np.empty((0, 3), dtype=GRADIENT_DTYPE_FLOAT), - "perps2": np.empty((0, 3), dtype=GRADIENT_DTYPE_FLOAT), - "Ls": np.empty((0,), dtype=GRADIENT_DTYPE_FLOAT), - "s_vals": np.empty((0,), dtype=GRADIENT_DTYPE_FLOAT), - "s_weights": np.empty((0,), dtype=GRADIENT_DTYPE_FLOAT), - "zc_vals": np.empty((0,), dtype=GRADIENT_DTYPE_FLOAT), + "centers": np.empty((0, 3), dtype=config.autograd.gradient_dtype_float), + "normals": np.empty((0, 3), dtype=config.autograd.gradient_dtype_float), + "perps1": np.empty((0, 3), dtype=config.autograd.gradient_dtype_float), + "perps2": np.empty((0, 3), dtype=config.autograd.gradient_dtype_float), + "Ls": np.empty((0,), dtype=config.autograd.gradient_dtype_float), + "s_vals": np.empty((0,), dtype=config.autograd.gradient_dtype_float), + "s_weights": np.empty((0,), dtype=config.autograd.gradient_dtype_float), + "zc_vals": np.empty((0,), dtype=config.autograd.gradient_dtype_float), "dz": dz, "edge_indices": np.empty((0,), dtype=int), } @@ -1654,22 +1664,22 @@ def _collect_sidewall_patches( estimated_patches = int(max(1, estimated_patches) * 
1.2) # pre-allocate arrays - centers = np.empty((estimated_patches, 3), dtype=GRADIENT_DTYPE_FLOAT) - normals = np.empty((estimated_patches, 3), dtype=GRADIENT_DTYPE_FLOAT) - perps1 = np.empty((estimated_patches, 3), dtype=GRADIENT_DTYPE_FLOAT) - perps2 = np.empty((estimated_patches, 3), dtype=GRADIENT_DTYPE_FLOAT) - Ls = np.empty((estimated_patches,), dtype=GRADIENT_DTYPE_FLOAT) - s_vals = np.empty((estimated_patches,), dtype=GRADIENT_DTYPE_FLOAT) - s_weights = np.empty((estimated_patches,), dtype=GRADIENT_DTYPE_FLOAT) - zc_vals = np.empty((estimated_patches,), dtype=GRADIENT_DTYPE_FLOAT) + centers = np.empty((estimated_patches, 3), dtype=config.autograd.gradient_dtype_float) + normals = np.empty((estimated_patches, 3), dtype=config.autograd.gradient_dtype_float) + perps1 = np.empty((estimated_patches, 3), dtype=config.autograd.gradient_dtype_float) + perps2 = np.empty((estimated_patches, 3), dtype=config.autograd.gradient_dtype_float) + Ls = np.empty((estimated_patches,), dtype=config.autograd.gradient_dtype_float) + s_vals = np.empty((estimated_patches,), dtype=config.autograd.gradient_dtype_float) + s_weights = np.empty((estimated_patches,), dtype=config.autograd.gradient_dtype_float) + zc_vals = np.empty((estimated_patches,), dtype=config.autograd.gradient_dtype_float) edge_indices = np.empty((estimated_patches,), dtype=int) patch_idx = 0 # if the simulation is effectively 2D (one tangential dimension collapsed), # slightly expand degenerate bounds to enable finite-length clipping of edges. 
- sim_min_eff = np.array(sim_min, dtype=GRADIENT_DTYPE_FLOAT) - sim_max_eff = np.array(sim_max, dtype=GRADIENT_DTYPE_FLOAT) + sim_min_eff = np.array(sim_min, dtype=config.autograd.gradient_dtype_float) + sim_max_eff = np.array(sim_max, dtype=config.autograd.gradient_dtype_float) for dim in range(3): if dim == self.axis: continue @@ -1703,11 +1713,15 @@ def _collect_sidewall_patches( # clip offset edge against simulation bounds in 3D v0_3d = ( - self.unpop_axis_vect(np.array([zc], dtype=GRADIENT_DTYPE_FLOAT), v0[None])[0] + self.unpop_axis_vect( + np.array([zc], dtype=config.autograd.gradient_dtype_float), v0[None] + )[0] + offset3d ) v1_3d = ( - self.unpop_axis_vect(np.array([zc], dtype=GRADIENT_DTYPE_FLOAT), v1[None])[0] + self.unpop_axis_vect( + np.array([zc], dtype=config.autograd.gradient_dtype_float), v1[None] + )[0] + offset3d ) clip = self._clip_edge_to_bounds_t(v0_3d, v1_3d, sim_min_eff, sim_max_eff) @@ -1723,7 +1737,7 @@ def _collect_sidewall_patches( pts2d = v0 + np.outer(s_list, edge_vec) xyz = ( self.unpop_axis_vect( - np.full(len(s_list), zc, dtype=GRADIENT_DTYPE_FLOAT), pts2d + np.full(len(s_list), zc, dtype=config.autograd.gradient_dtype_float), pts2d ) + offset3d ) @@ -1815,7 +1829,9 @@ def _compute_derivative_sidewall_angle( Therefore each patch weight is w = L * dz * (-(z - z_ref)) / cos(theta)^2. 
""" if interpolators is None: - interpolators = derivative_info.create_interpolators(dtype=GRADIENT_DTYPE_FLOAT) + interpolators = derivative_info.create_interpolators( + dtype=config.autograd.gradient_dtype_float + ) # 2D sim => no dependence on theta (z_local=0) if is_2d: @@ -1945,7 +1961,9 @@ def compute_derivative_slab_bounds_line( dx = derivative_info.adaptive_vjp_spacing() n_seg = max(1, int(np.ceil(length / dx))) - coords = np.linspace(l_min, l_max, 2 * n_seg + 1, dtype=GRADIENT_DTYPE_FLOAT)[1::2] + coords = np.linspace( + l_min, l_max, 2 * n_seg + 1, dtype=config.autograd.gradient_dtype_float + )[1::2] # build XY coordinates and in-plane direction vectors if line_dim == 0: @@ -2005,10 +2023,10 @@ def compute_derivative_slab_bounds_surface( g2, w2 = leggauss(n2) coords1 = (0.5 * (r1_max - r1_min) * g1 + 0.5 * (r1_max + r1_min)).astype( - GRADIENT_DTYPE_FLOAT, copy=False + config.autograd.gradient_dtype_float, copy=False ) coords2 = (0.5 * (r2_max - r2_min) * g2 + 0.5 * (r2_max + r2_min)).astype( - GRADIENT_DTYPE_FLOAT, copy=False + config.autograd.gradient_dtype_float, copy=False ) r1_grid, r2_grid = np.meshgrid(coords1, coords2, indexing="ij") @@ -2021,29 +2039,33 @@ def compute_derivative_slab_bounds_surface( return 0.0 xyz = self.unpop_axis_vect( - np.full(in_face.sum(), ax_val, dtype=GRADIENT_DTYPE_FLOAT), pts[in_face] + np.full(in_face.sum(), ax_val, dtype=config.autograd.gradient_dtype_float), pts[in_face] ) n_patches = xyz.shape[0] normals_xyz = self.unpop_axis_vect( - np.full(n_patches, -1 if min_max_index == 0 else 1, dtype=GRADIENT_DTYPE_FLOAT), - np.zeros((n_patches, 2), dtype=GRADIENT_DTYPE_FLOAT), + np.full( + n_patches, + -1 if min_max_index == 0 else 1, + dtype=config.autograd.gradient_dtype_float, + ), + np.zeros((n_patches, 2), dtype=config.autograd.gradient_dtype_float), ) perps1_xyz = self.unpop_axis_vect( - np.zeros(n_patches, dtype=GRADIENT_DTYPE_FLOAT), + np.zeros(n_patches, dtype=config.autograd.gradient_dtype_float), np.column_stack( 
( - np.ones(n_patches, dtype=GRADIENT_DTYPE_FLOAT), - np.zeros(n_patches, dtype=GRADIENT_DTYPE_FLOAT), + np.ones(n_patches, dtype=config.autograd.gradient_dtype_float), + np.zeros(n_patches, dtype=config.autograd.gradient_dtype_float), ) ), ) perps2_xyz = self.unpop_axis_vect( - np.zeros(n_patches, dtype=GRADIENT_DTYPE_FLOAT), + np.zeros(n_patches, dtype=config.autograd.gradient_dtype_float), np.column_stack( ( - np.zeros(n_patches, dtype=GRADIENT_DTYPE_FLOAT), - np.ones(n_patches, dtype=GRADIENT_DTYPE_FLOAT), + np.zeros(n_patches, dtype=config.autograd.gradient_dtype_float), + np.ones(n_patches, dtype=config.autograd.gradient_dtype_float), ) ), ) @@ -2100,7 +2122,9 @@ def _compute_derivative_vertices( # use provided interpolators or create them if not provided if interpolators is None: - interpolators = derivative_info.create_interpolators(dtype=GRADIENT_DTYPE_FLOAT) + interpolators = derivative_info.create_interpolators( + dtype=config.autograd.gradient_dtype_float + ) # evaluate integrand g = derivative_info.evaluate_gradient_at_points( @@ -2135,7 +2159,7 @@ def _compute_derivative_vertices( vjp_per_vertex = np.stack((v0x + v1x, v0y + v1y), axis=1) return vjp_per_vertex - def _edge_geometry_arrays(self, dtype: np.dtype = GRADIENT_DTYPE_FLOAT): + def _edge_geometry_arrays(self, dtype: np.dtype = config.autograd.gradient_dtype_float): """Return (vertices, next_v, edges, basis) arrays for sidewall edge geometry.""" vertices = np.asarray(self.vertices, dtype=dtype) next_v = np.roll(vertices, -1, axis=0) @@ -2150,11 +2174,11 @@ def edge_basis_vectors( """Normalized basis vectors for ``normal`` direction, ``slab`` tangent direction and ``edge``.""" # ensure edges have consistent dtype - edges = edges.astype(GRADIENT_DTYPE_FLOAT, copy=False) + edges = edges.astype(config.autograd.gradient_dtype_float, copy=False) num_vertices, _ = edges.shape - zeros = np.zeros(num_vertices, dtype=GRADIENT_DTYPE_FLOAT) - ones = np.ones(num_vertices, dtype=GRADIENT_DTYPE_FLOAT) + zeros 
= np.zeros(num_vertices, dtype=config.autograd.gradient_dtype_float) + ones = np.ones(num_vertices, dtype=config.autograd.gradient_dtype_float) # normalized vectors along edges edges_norm_in_plane = self.normalize_vect(edges) @@ -2166,7 +2190,7 @@ def edge_basis_vectors( slabs_axis_components = cos_angle * ones # create axis_norm as array directly to avoid tuple->array conversion in np.cross - axis_norm = np.zeros(3, dtype=GRADIENT_DTYPE_FLOAT) + axis_norm = np.zeros(3, dtype=config.autograd.gradient_dtype_float) axis_norm[self.axis] = 1.0 slab_normal_xyz = -sin_angle * np.cross(edges_norm_xyz, axis_norm) _, slab_normal_in_plane = self.pop_axis_vect(slab_normal_xyz) diff --git a/tidy3d/components/geometry/primitives.py b/tidy3d/components/geometry/primitives.py index 72feac6314..badb370270 100644 --- a/tidy3d/components/geometry/primitives.py +++ b/tidy3d/components/geometry/primitives.py @@ -11,13 +11,10 @@ import shapely from tidy3d.components.autograd import AutogradFieldMap, TracedSize1D -from tidy3d.components.autograd.constants import ( - MIN_WVL_FRACTION_CYLINDER_DISCRETIZE, - PTS_PER_WVL_MAT_CYLINDER_DISCRETIZE, -) from tidy3d.components.autograd.derivative_utils import DerivativeInfo from tidy3d.components.base import cached_property, skip_if_fields_missing from tidy3d.components.types import Axis, Bound, Coordinate, MatrixReal4x4, Shapely +from tidy3d.config import config from tidy3d.constants import LARGE_NUMBER, MICROMETER from tidy3d.exceptions import SetupError, ValidationError from tidy3d.log import log @@ -287,7 +284,9 @@ def _discretization_wavelength(self, derivative_info: DerivativeInfo) -> float: wvl0_min = derivative_info.wavelength_min wvl_mat = wvl0_min / np.max([1.0, np.max(np.sqrt(abs(derivative_info.eps_in)))]) - min_wvl_mat = MIN_WVL_FRACTION_CYLINDER_DISCRETIZE * wvl0_min + grid_cfg = config.autograd + + min_wvl_mat = grid_cfg.min_wvl_fraction * wvl0_min if wvl_mat < min_wvl_mat: log.warning( f"The minimum wavelength inside the cylinder 
material is {wvl_mat:.3e} μm, which would " @@ -309,9 +308,8 @@ def _compute_derivatives(self, derivative_info: DerivativeInfo) -> AutogradField circumference = 2 * np.pi * self.radius wvls_in_circumference = circumference / wvl_mat - num_pts_circumference = int( - np.ceil(PTS_PER_WVL_MAT_CYLINDER_DISCRETIZE * wvls_in_circumference) - ) + grid_cfg = config.autograd + num_pts_circumference = int(np.ceil(grid_cfg.points_per_wavelength * wvls_in_circumference)) num_pts_circumference = max(3, num_pts_circumference) # construct equivalent polyslab and compute the derivatives diff --git a/tidy3d/components/structure.py b/tidy3d/components/structure.py index 4c4401088f..e17e3c36db 100644 --- a/tidy3d/components/structure.py +++ b/tidy3d/components/structure.py @@ -11,10 +11,7 @@ import numpy as np import pydantic.v1 as pydantic -from tidy3d.components.autograd.constants import ( - AUTOGRAD_MONITOR_INTERVAL_SPACE_CUSTOM, - AUTOGRAD_MONITOR_INTERVAL_SPACE_POLY, -) +from tidy3d.config import config from tidy3d.constants import MICROMETER from tidy3d.exceptions import SetupError, Tidy3dImportError from tidy3d.log import log @@ -316,10 +313,12 @@ def _make_adjoint_monitors( size = [get_static(x) for x in box.size] center = [get_static(x) for x in box.center] + monitor_cfg = config.autograd + if contains("medium", field_keys): - interval_space = AUTOGRAD_MONITOR_INTERVAL_SPACE_CUSTOM + interval_space = monitor_cfg.monitor_interval_custom else: - interval_space = AUTOGRAD_MONITOR_INTERVAL_SPACE_POLY + interval_space = monitor_cfg.monitor_interval_poly field_components_for_adjoint = [f"E{dim}" for dim in "xyz"] if self.medium.is_pec: diff --git a/tidy3d/config.py b/tidy3d/config.py deleted file mode 100644 index 9dfc7b702c..0000000000 --- a/tidy3d/config.py +++ /dev/null @@ -1,60 +0,0 @@ -"""Sets the configuration of the script, can be changed with `td.config.config_name = new_val`.""" - -from __future__ import annotations - -from typing import Optional - -import pydantic.v1 as 
pd - -from .log import DEFAULT_LEVEL, LogLevel, set_log_suppression, set_logging_level - - -class Tidy3dConfig(pd.BaseModel): - """configuration of tidy3d""" - - class Config: - """Config of the config.""" - - arbitrary_types_allowed = False - validate_all = True - extra = "forbid" - validate_assignment = True - allow_population_by_field_name = True - frozen = False - - logging_level: LogLevel = pd.Field( - DEFAULT_LEVEL, - title="Logging Level", - description="The lowest level of logging output that will be displayed. " - 'Can be "DEBUG", "SUPPORT", "USER", INFO", "WARNING", "ERROR", or "CRITICAL". ' - 'Note: "SUPPORT" and "USER" levels are only used in backend solver logging.', - ) - - log_suppression: bool = pd.Field( - True, - title="Log suppression", - description="Enable or disable suppression of certain log messages when they are repeated " - "for several elements.", - ) - - use_local_subpixel: Optional[bool] = pd.Field( - None, - title="Whether to use local subpixel averaging. If 'None', local subpixel " - "averaging will be used if 'tidy3d-extras' is installed and not used otherwise.", - ) - - @pd.validator("logging_level", pre=True, always=True) - def _set_logging_level(cls, val): - """Set the logging level if logging_level is changed.""" - set_logging_level(val) - return val - - @pd.validator("log_suppression", pre=True, always=True) - def _set_log_suppression(cls, val): - """Control log suppression when log_suppression is changed.""" - set_log_suppression(val) - return val - - -# instance of the config that can be modified. -config = Tidy3dConfig() diff --git a/tidy3d/config/__init__.py b/tidy3d/config/__init__.py new file mode 100644 index 0000000000..ccde634574 --- /dev/null +++ b/tidy3d/config/__init__.py @@ -0,0 +1,62 @@ +"""Tidy3D configuration system public API.""" + +from __future__ import annotations + +from typing import Any + +from . 
import sections # noqa: F401 - ensure builtin sections register +from .legacy import LegacyConfigWrapper, LegacyEnvironment, LegacyEnvironmentConfig +from .manager import ConfigManager +from .registry import ( + get_handlers, + get_sections, + register_handler, + register_plugin, + register_section, +) + +__all__ = [ + "ConfigManager", + "Env", + "Environment", + "EnvironmentConfig", + "config", + "get_handlers", + "get_sections", + "register_handler", + "register_plugin", + "register_section", +] + + +def _create_manager() -> ConfigManager: + return ConfigManager() + + +_base_manager = _create_manager() +_config_wrapper = LegacyConfigWrapper(_base_manager) +config = _config_wrapper + +Environment = LegacyEnvironment +EnvironmentConfig = LegacyEnvironmentConfig +Env = LegacyEnvironment(_base_manager) + + +def reload_config(*, profile: str | None = None) -> LegacyConfigWrapper: + """Recreate the global configuration manager (primarily for tests).""" + + global _base_manager, Env + _base_manager = ConfigManager(profile=profile) + _config_wrapper.reset_manager(_base_manager) + Env.reset_manager(_base_manager) + return _config_wrapper + + +def get_manager() -> ConfigManager: + """Return the underlying configuration manager instance.""" + + return _base_manager + + +def __getattr__(name: str) -> Any: # pragma: no cover - defensive re-export + return getattr(config, name) diff --git a/tidy3d/config/legacy.py b/tidy3d/config/legacy.py new file mode 100644 index 0000000000..547c160ae9 --- /dev/null +++ b/tidy3d/config/legacy.py @@ -0,0 +1,324 @@ +"""Legacy compatibility layer for tidy3d.config.""" + +from __future__ import annotations + +import os +import ssl +from typing import Any, Optional + +from .manager import ConfigManager, normalize_profile_name +from .profiles import BUILTIN_PROFILES + + +class LegacyConfigWrapper: + """Provide attribute-level compatibility with the legacy config module.""" + + def __init__(self, manager: ConfigManager): + self._manager = manager 
+ self._frozen = False # retained for backwards compatibility tests + + # ------------------------------------------------------------------ + # Legacy attributes + # ------------------------------------------------------------------ + @property + def logging_level(self): + return self._manager.get_section("logging").level + + @logging_level.setter + def logging_level(self, value): + from warnings import warn + + warn( + "'config.logging_level' is deprecated; use 'config.logging.level' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._manager.update_section("logging", level=value) + + @property + def log_suppression(self): + return self._manager.get_section("logging").suppression + + @log_suppression.setter + def log_suppression(self, value): + from warnings import warn + + warn( + "'config.log_suppression' is deprecated; use 'config.logging.suppression'.", + DeprecationWarning, + stacklevel=2, + ) + self._manager.update_section("logging", suppression=value) + + @property + def use_local_subpixel(self): + return self._manager.get_section("simulation").use_local_subpixel + + @use_local_subpixel.setter + def use_local_subpixel(self, value): + from warnings import warn + + warn( + "'config.use_local_subpixel' is deprecated; use 'config.simulation.use_local_subpixel'.", + DeprecationWarning, + stacklevel=2, + ) + self._manager.update_section("simulation", use_local_subpixel=value) + + @property + def frozen(self): # pragma: no cover - legacy testing hook + return self._frozen + + @frozen.setter + def frozen(self, value): # pragma: no cover - legacy testing hook + self._frozen = bool(value) + + # ------------------------------------------------------------------ + # Delegation + # ------------------------------------------------------------------ + def save(self, include_defaults: bool = False): + self._manager.save(include_defaults=include_defaults) + + def reset_manager(self, manager: ConfigManager) -> None: + """Swap the underlying manager instance.""" + + 
self._manager = manager + + def __getattr__(self, name: str) -> Any: + return getattr(self._manager, name) + + def __setattr__(self, name: str, value: Any) -> None: + if name.startswith("_"): + object.__setattr__(self, name, value) + elif name in {"logging_level", "log_suppression", "use_local_subpixel", "frozen"}: + prop = getattr(type(self), name) + prop.fset(self, value) # type: ignore[attr-defined] + else: + setattr(self._manager, name, value) + + +class LegacyEnvironmentConfig: + """Backward compatible environment config wrapper.""" + + def __init__( + self, + manager: Optional[ConfigManager] = None, + name: Optional[str] = None, + *, + web_api_endpoint: Optional[str] = None, + website_endpoint: Optional[str] = None, + s3_region: Optional[str] = None, + ssl_verify: Optional[bool] = None, + enable_caching: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + env_vars: Optional[dict[str, str]] = None, + environment: Optional[LegacyEnvironment] = None, + ) -> None: + if name is None: + raise ValueError("Environment name is required") + name = normalize_profile_name(name) + self._manager = manager + self._name = name + self._environment = environment + self._overrides: dict[str, Any] = {} + if web_api_endpoint is not None: + self._overrides["api_endpoint"] = web_api_endpoint + if website_endpoint is not None: + self._overrides["website_endpoint"] = website_endpoint + if s3_region is not None: + self._overrides["s3_region"] = s3_region + if ssl_verify is not None: + self._overrides["ssl_verify"] = ssl_verify + if enable_caching is not None: + self._overrides["enable_caching"] = enable_caching + if ssl_version is not None: + self._overrides["ssl_version"] = ssl_version + if env_vars is not None: + self._overrides["env_vars"] = dict(env_vars) + + @property + def manager(self) -> Optional[ConfigManager]: + return self._manager + + def active(self) -> None: + if self._manager is not None and self._manager.profile != self._name: + 
self._manager.switch_profile(self._name) + + environment = self._environment + if environment is None: + from tidy3d.config import Env # local import to avoid circular + + environment = Env + + environment.set_current(self) + + # legacy attribute names ------------------------------------------------- + @property + def web_api_endpoint(self) -> Optional[str]: + value = self._value("api_endpoint") + return _maybe_str(value) + + @property + def website_endpoint(self) -> Optional[str]: + value = self._value("website_endpoint") + return _maybe_str(value) + + @property + def s3_region(self) -> Optional[str]: + return self._value("s3_region") + + @property + def ssl_verify(self) -> bool: + value = self._value("ssl_verify") + if value is None: + return True + return bool(value) + + @property + def enable_caching(self) -> bool: + value = self._value("enable_caching") + if value is None: + return True + return bool(value) + + @enable_caching.setter + def enable_caching(self, value: bool) -> None: + self._overrides["enable_caching"] = value + if self._manager and self._manager.profile == self._name: + self._manager.update_section("web", enable_caching=value) + + @property + def ssl_version(self): + return self._value("ssl_version") + + @property + def env_vars(self): + value = self._value("env_vars") + if value is None: + return {} + return dict(value) + + @env_vars.setter + def env_vars(self, value: dict[str, str]) -> None: + self._overrides["env_vars"] = dict(value) + + @property + def name(self) -> str: + return self._name + + @name.setter + def name(self, value: str) -> None: + self._name = normalize_profile_name(value) + + def get_real_url(self, path: str) -> str: + endpoint = self.web_api_endpoint or "" + return "/".join([endpoint.rstrip("/"), path.lstrip("/")]) + + # internals -------------------------------------------------------------- + @property + def _web_section(self): + section = {} + if self._manager is not None: + if self._manager.profile == self._name: + 
source = self._manager.as_dict().get("web", {}) + else: + source = self._manager.preview_profile(self._name).get("web", {}) + if isinstance(source, dict): + section.update(source) + for key, value in self._overrides.items(): + if value is not None: + section[key] = value + return section + + def _value(self, key: str) -> Any: + if key in self._overrides and self._overrides[key] is not None: + return self._overrides[key] + return self._web_section.get(key) + + +class LegacyEnvironment: + """Legacy Env wrapper that maps to profiles.""" + + def __init__(self, manager: ConfigManager): + self._previous_env_vars: dict[str, Optional[str]] = {} + self.reset_manager(manager) + + def reset_manager(self, manager: ConfigManager) -> None: + self._manager = manager + self.env_map: dict[str, LegacyEnvironmentConfig] = {} + for name in BUILTIN_PROFILES: + self.env_map[name] = LegacyEnvironmentConfig(manager, name, environment=self) + + desired_env = os.getenv("TIDY3D_ENV") + if desired_env: + desired = normalize_profile_name(desired_env) + else: + desired = manager.profile + + if desired == "default": + desired = "prod" + + desired = normalize_profile_name(desired) + + self._current = self.env_map.setdefault( + desired, LegacyEnvironmentConfig(manager, desired, environment=self) + ) + self._apply_env_vars(self._current) + + @property + def current(self) -> LegacyEnvironmentConfig: + return self._current + + def set_current(self, env_config: LegacyEnvironmentConfig) -> None: + key = normalize_profile_name(env_config.name) + if env_config.manager is self._manager: + if self._manager.profile != key: + self._manager.switch_profile(key) + stored = self.env_map.setdefault(key, env_config) + else: + stored = env_config + stored.name = key + self.env_map[key] = stored + + stored._environment = self + self._current = stored + self._apply_env_vars(stored) + + def enable_caching(self, enable_caching: bool = True) -> None: + if self._current.manager is self._manager: + 
self._manager.update_section("web", enable_caching=enable_caching) + self._current.enable_caching = enable_caching + + def set_ssl_version(self, ssl_version) -> None: + if self._current.manager is self._manager: + self._manager.update_section("web", ssl_version=ssl_version) + self._current._overrides["ssl_version"] = ssl_version + + def __getattr__(self, name: str) -> LegacyEnvironmentConfig: + key = normalize_profile_name(name) + return self.env_map.setdefault(key, LegacyEnvironmentConfig(self._manager, key)) + + def _apply_env_vars(self, config: LegacyEnvironmentConfig) -> None: + self._restore_env_vars() + env_vars = config.env_vars or {} + self._previous_env_vars = {} + for key, value in env_vars.items(): + self._previous_env_vars[key] = os.environ.get(key) + os.environ[key] = value + + def _restore_env_vars(self) -> None: + for key, previous in self._previous_env_vars.items(): + if previous is None: + os.environ.pop(key, None) + else: + os.environ[key] = previous + self._previous_env_vars = {} + + +def _maybe_str(value: Any) -> Optional[str]: + if value is None: + return None + return str(value) + + +__all__ = ["LegacyConfigWrapper", "LegacyEnvironment", "LegacyEnvironmentConfig"] diff --git a/tidy3d/config/loader.py b/tidy3d/config/loader.py new file mode 100644 index 0000000000..5075b120cc --- /dev/null +++ b/tidy3d/config/loader.py @@ -0,0 +1,317 @@ +"""Filesystem helpers and persistence utilities for the configuration system.""" + +from __future__ import annotations + +import os +import shutil +import tempfile +from copy import deepcopy +from pathlib import Path +from typing import Any, Optional + +import toml +import tomlkit + +from tidy3d.log import log + +from .profiles import BUILTIN_PROFILES +from .serializer import build_document, collect_descriptions + + +class ConfigLoader: + """Handle reading and writing configuration files.""" + + def __init__(self, config_dir: Optional[Path] = None): + self.config_dir = config_dir or resolve_config_directory() + 
self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True) + self._docs: dict[Path, tomlkit.TOMLDocument] = {} + self._descriptions: Optional[dict[tuple[str, ...], str]] = None + + # ------------------------------------------------------------------ + # Loading helpers + # ------------------------------------------------------------------ + def load_base(self) -> dict[str, Any]: + """Load base configuration from config.toml.""" + + config_path = self.config_dir / "config.toml" + return self._read_toml(config_path) + + def load_user_profile(self, profile: str) -> dict[str, Any]: + """Load user profile overrides (if any).""" + + if profile in ("default", "prod"): + # default and prod share the same baseline; user overrides live in config.toml + return {} + + profile_path = self.profile_path(profile) + return self._read_toml(profile_path) + + def get_builtin_profile(self, profile: str) -> dict[str, Any]: + """Return builtin profile data if available.""" + + return BUILTIN_PROFILES.get(profile, {}) + + # ------------------------------------------------------------------ + # Save helpers + # ------------------------------------------------------------------ + def save_base(self, data: dict[str, Any]) -> None: + """Persist base configuration.""" + + config_path = self.config_dir / "config.toml" + self._atomic_write(config_path, data) + + def save_profile(self, profile: str, data: dict[str, Any]) -> None: + """Persist profile overrides (remove file if empty).""" + + profile_path = self.profile_path(profile) + if not data: + if profile_path.exists(): + profile_path.unlink() + self._docs.pop(profile_path, None) + return + profile_path.parent.mkdir(mode=0o700, parents=True, exist_ok=True) + self._atomic_write(profile_path, data) + + # ------------------------------------------------------------------ + # Utilities + # ------------------------------------------------------------------ + def profile_path(self, profile: str) -> Path: + """Return on-disk path for a 
profile.""" + + return self.config_dir / "profiles" / f"{profile}.toml" + + def _read_toml(self, path: Path) -> dict[str, Any]: + if not path.exists(): + self._docs.pop(path, None) + return {} + + try: + text = path.read_text(encoding="utf-8") + except Exception as exc: # pragma: no cover - defensive + log.warning(f"Failed to read configuration file '{path}': {exc}") + self._docs.pop(path, None) + return {} + + try: + document = tomlkit.parse(text) + except Exception as exc: # pragma: no cover - defensive + log.warning(f"Failed to parse configuration file '{path}': {exc}") + document = tomlkit.document() + self._docs[path] = document + + try: + return toml.loads(text) + except Exception as exc: # pragma: no cover - defensive + log.warning(f"Failed to decode configuration file '{path}': {exc}") + return {} + + def _atomic_write(self, path: Path, data: dict[str, Any]) -> None: + path.parent.mkdir(mode=0o700, parents=True, exist_ok=True) + tmp_dir = path.parent + + cleaned = _clean_data(deepcopy(data)) + descriptions = self._descriptions or collect_descriptions() + self._descriptions = descriptions + + base_document = self._docs.get(path) + document = build_document(cleaned, base_document, descriptions) + toml_text = tomlkit.dumps(document) + + with tempfile.NamedTemporaryFile( + "w", dir=tmp_dir, delete=False, encoding="utf-8" + ) as handle: + tmp_path = Path(handle.name) + handle.write(toml_text) + handle.flush() + os.fsync(handle.fileno()) + + backup_path = path.with_suffix(path.suffix + ".bak") + try: + if path.exists(): + shutil.copy2(path, backup_path) + tmp_path.replace(path) + os.chmod(path, 0o600) + if backup_path.exists(): + backup_path.unlink() + except Exception: # pragma: no cover - best effort restoration + if tmp_path.exists(): + tmp_path.unlink() + if backup_path.exists(): + try: + backup_path.replace(path) + except Exception: # pragma: no cover + log.warning("Failed to restore configuration backup") + raise + + self._docs[path] = 
tomlkit.parse(toml_text) + + +# ---------------------------------------------------------------------- +# Environment variable parsing +# ---------------------------------------------------------------------- + + +def load_environment_overrides() -> dict[str, Any]: + """Parse environment variables into a nested configuration dict.""" + + overrides: dict[str, Any] = {} + for key, value in os.environ.items(): + if key == "SIMCLOUD_APIKEY": + _assign_path(overrides, ("web", "apikey"), value) + continue + if not key.startswith("TIDY3D_"): + continue + rest = key[len("TIDY3D_") :] + if "__" not in rest: + continue + segments = tuple(segment.lower() for segment in rest.split("__") if segment) + if not segments: + continue + if segments[0] == "auth": + segments = ("web",) + segments[1:] + _assign_path(overrides, segments, value) + return overrides + + +# ---------------------------------------------------------------------- +# Dictionary helpers +# ---------------------------------------------------------------------- + + +def deep_merge(*sources: dict[str, Any]) -> dict[str, Any]: + """Deep merge multiple dictionaries into a new dict.""" + + result: dict[str, Any] = {} + for source in sources: + _merge_into(result, source) + return result + + +def _merge_into(target: dict[str, Any], source: dict[str, Any]) -> None: + for key, value in source.items(): + if isinstance(value, dict): + node = target.setdefault(key, {}) + if isinstance(node, dict): + _merge_into(node, value) + else: + target[key] = _deep_copy_dict(value) + else: + target[key] = value + + +def deep_diff(base: dict[str, Any], target: dict[str, Any]) -> dict[str, Any]: + """Return keys from target that differ from base.""" + + diff: dict[str, Any] = {} + keys = set(base.keys()) | set(target.keys()) + for key in keys: + base_value = base.get(key) + target_value = target.get(key) + if isinstance(base_value, dict) and isinstance(target_value, dict): + nested = deep_diff(base_value, target_value) + if nested: + 
diff[key] = nested + elif target_value != base_value: + if isinstance(target_value, dict): + diff[key] = _deep_copy_dict(target_value) + else: + diff[key] = target_value + return diff + + +def _assign_path(target: dict[str, Any], path: tuple[str, ...], value: Any) -> None: + node = target + for segment in path[:-1]: + node = node.setdefault(segment, {}) + node[path[-1]] = value + + +def _deep_copy_dict(data: dict[str, Any]) -> dict[str, Any]: + return { + key: _deep_copy_dict(value) if isinstance(value, dict) else value + for key, value in data.items() + } + + +# ---------------------------------------------------------------------- +# Filesystem helpers +# ---------------------------------------------------------------------- + + +def _clean_data(data: Any) -> Any: + if isinstance(data, dict): + cleaned: dict[str, Any] = {} + for key, value in data.items(): + cleaned_value = _clean_data(value) + if cleaned_value is None: + continue + cleaned[key] = cleaned_value + return cleaned + if isinstance(data, list): + cleaned_list = [_clean_data(item) for item in data] + return [item for item in cleaned_list if item is not None] + if data is None: + return None + return data + + +# ---------------------------------------------------------------------- +# Config directory resolution +# ---------------------------------------------------------------------- + + +def resolve_config_directory() -> Path: + """Determine the directory used to store tidy3d configuration files.""" + + base_override = os.getenv("TIDY3D_BASE_DIR") + if base_override: + path = Path(base_override).expanduser() / ".tidy3d" + if _is_writable(path.parent): + return path + log.warning( + "TIDY3D_BASE_DIR is not writable; using temporary configuration directory instead." + ) + return _temporary_config_dir() + + legacy_dir = Path.home() / ".tidy3d" + if legacy_dir.exists(): + log.warning( + "Configuration found in legacy location '~/.tidy3d'. 
Consider running 'tidy3d config migrate'.", + log_once=True, + ) + return legacy_dir + + canonical_dir = _xdg_config_home() / "tidy3d" + if _is_writable(canonical_dir.parent): + return canonical_dir + + log.warning( + "Unable to write to '%s'; falling back to temporary directory.", + canonical_dir, + ) + return _temporary_config_dir() + + +def _xdg_config_home() -> Path: + xdg_home = os.getenv("XDG_CONFIG_HOME") + if xdg_home: + return Path(xdg_home).expanduser() + return Path.home() / ".config" + + +def _temporary_config_dir() -> Path: + base = Path(tempfile.gettempdir()) / "tidy3d" + base.mkdir(mode=0o700, exist_ok=True) + return base / ".tidy3d" + + +def _is_writable(path: Path) -> bool: + try: + path.mkdir(parents=True, exist_ok=True) + test_file = path / ".tidy3d_write_test" + with open(test_file, "w", encoding="utf-8"): + pass + test_file.unlink() + return True + except Exception: + return False diff --git a/tidy3d/config/manager.py b/tidy3d/config/manager.py new file mode 100644 index 0000000000..b3db516f9e --- /dev/null +++ b/tidy3d/config/manager.py @@ -0,0 +1,477 @@ +"""Central configuration manager implementation.""" + +from __future__ import annotations + +import os +import shutil +from collections import defaultdict +from collections.abc import Iterable +from copy import deepcopy +from pathlib import Path +from typing import Any, Optional, get_args, get_origin + +from pydantic import BaseModel + +from tidy3d.log import log + +from .loader import ( + ConfigLoader, + deep_diff, + deep_merge, + load_environment_overrides, +) +from .profiles import BUILTIN_PROFILES +from .registry import attach_manager, get_handlers, get_sections + + +def normalize_profile_name(name: str) -> str: + """Return a canonical profile name for builtin profiles.""" + + normalized = name.strip() + lowered = normalized.lower() + if lowered in BUILTIN_PROFILES: + return lowered + return normalized + + +class SectionAccessor: + """Attribute proxy that routes assignments back through 
the manager.""" + + def __init__(self, manager: ConfigManager, path: str): + self._manager = manager + self._path = path + + def __getattr__(self, name: str) -> Any: + model = self._manager._get_model(self._path) + if model is None: + raise AttributeError(f"Section '{self._path}' is not available") + return getattr(model, name) + + def __setattr__(self, name: str, value: Any) -> None: + if name.startswith("_"): + object.__setattr__(self, name, value) + return + self._manager.update_section(self._path, **{name: value}) + + def __repr__(self) -> str: # pragma: no cover - debug helper + model = self._manager._get_model(self._path) + return f"SectionAccessor({self._path}={model!r})" + + def dict(self, *args, **kwargs): # type: ignore[override] + model = self._manager._get_model(self._path) + if model is None: + return {} + return model.model_dump(*args, **kwargs) + + +class PluginsAccessor: + """Provides access to registered plugin configurations.""" + + def __init__(self, manager: ConfigManager): + self._manager = manager + + def __getattr__(self, plugin: str) -> SectionAccessor: + if plugin not in self._manager._plugin_models: + raise AttributeError(f"Plugin '{plugin}' is not registered") + return SectionAccessor(self._manager, f"plugins.{plugin}") + + def list(self) -> Iterable[str]: + return sorted(self._manager._plugin_models.keys()) + + +class ProfilesAccessor: + """Read-only profile helper.""" + + def __init__(self, manager: ConfigManager): + self._manager = manager + + def list(self) -> dict[str, list[str]]: + return self._manager.list_profiles() + + def __getattr__(self, profile: str) -> dict[str, Any]: + return self._manager.preview_profile(profile) + + +class ConfigManager: + """High-level orchestrator for tidy3d configuration.""" + + def __init__( + self, + profile: Optional[str] = None, + config_dir: Optional[os.PathLike[str]] = None, + ): + loader_path = None if config_dir is None else Path(config_dir) + self._loader = ConfigLoader(loader_path) + 
self._runtime_overrides: dict[str, dict[str, Any]] = defaultdict(dict) + self._plugin_models: dict[str, BaseModel] = {} + self._section_models: dict[str, BaseModel] = {} + self._profile = self._resolve_initial_profile(profile) + self._builtin_data: dict[str, Any] = {} + self._base_data: dict[str, Any] = {} + self._profile_data: dict[str, Any] = {} + self._raw_tree: dict[str, Any] = {} + self._effective_tree: dict[str, Any] = {} + self._env_overrides: dict[str, Any] = load_environment_overrides() + + attach_manager(self) + self._reload() + self._apply_handlers() + + # ------------------------------------------------------------------ + # Properties + # ------------------------------------------------------------------ + @property + def profile(self) -> str: + return self._profile + + @property + def config_dir(self): + return self._loader.config_dir + + @property + def plugins(self) -> PluginsAccessor: + return PluginsAccessor(self) + + @property + def profiles(self) -> ProfilesAccessor: + return ProfilesAccessor(self) + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + def update_section(self, name: str, **updates: Any) -> None: + if not updates: + return + segments = name.split(".") + overrides = self._runtime_overrides[self._profile] + previous = deepcopy(overrides) + node = overrides + for segment in segments[:-1]: + node = node.setdefault(segment, {}) + section_key = segments[-1] + section_payload = node.setdefault(section_key, {}) + for key, value in updates.items(): + section_payload[key] = _serialize_value(value) + try: + self._reload() + except Exception: + self._runtime_overrides[self._profile] = previous + raise + self._apply_handlers(section=name) + + def switch_profile(self, profile: str) -> None: + if not profile: + raise ValueError("Profile name cannot be empty") + normalized = normalize_profile_name(profile) + if not normalized: + raise 
ValueError("Profile name cannot be empty") + self._profile = normalized + self._reload() + self._apply_handlers() + + def save(self, include_defaults: bool = False) -> None: + base_without_env = self._filter_persisted(self._compose_without_env()) + if include_defaults: + defaults = self._filter_persisted(self._default_tree()) + base_without_env = deep_merge(defaults, base_without_env) + + if self._profile == "default": + self._loader.save_base(base_without_env) + else: + baseline = self._filter_persisted(deep_merge(self._builtin_data, self._base_data)) + diff = deep_diff(baseline, base_without_env) + self._loader.save_profile(self._profile, diff) + # refresh cached base/profile data after saving + self._base_data = self._loader.load_base() + self._profile_data = self._loader.load_user_profile(self._profile) + self._reload() + + def reset_to_defaults(self, *, include_profiles: bool = True) -> None: + """Reset configuration files to their default annotated state.""" + + self._runtime_overrides = defaultdict(dict) + defaults = self._filter_persisted(self._default_tree()) + self._loader.save_base(defaults) + + if include_profiles: + profiles_dir = self._loader.profile_path("_dummy").parent + if profiles_dir.exists(): + shutil.rmtree(profiles_dir) + loader_docs = getattr(self._loader, "_docs", {}) + for path in list(loader_docs.keys()): + try: + path.relative_to(profiles_dir) + except ValueError: + continue + loader_docs.pop(path, None) + self._profile = "default" + + self._reload() + self._apply_handlers() + + def list_profiles(self) -> dict[str, list[str]]: + profiles_dir = self._loader.config_dir / "profiles" + user_profiles = [] + if profiles_dir.exists(): + for path in profiles_dir.glob("*.toml"): + user_profiles.append(path.stem) + built_in = sorted(name for name in BUILTIN_PROFILES.keys()) + return {"built_in": built_in, "user": sorted(user_profiles)} + + def preview_profile(self, profile: str) -> dict[str, Any]: + builtin = 
self._loader.get_builtin_profile(profile) + base = self._loader.load_base() + overrides = self._loader.load_user_profile(profile) + view = deep_merge(builtin, base, overrides) + return deepcopy(view) + + def get_section(self, name: str) -> BaseModel: + model = self._get_model(name) + if model is None: + raise AttributeError(f"Section '{name}' is not available") + return model + + def as_dict(self, include_env: bool = True) -> dict[str, Any]: + if include_env: + return deepcopy(self._effective_tree) + return self._compose_without_env() + + # ------------------------------------------------------------------ + # Registry callbacks + # ------------------------------------------------------------------ + def on_section_registered(self, section: str) -> None: # pragma: no cover - simple hook + self._reload() + self._apply_handlers(section=section) + + def on_handler_registered(self, section: str) -> None: # pragma: no cover - simple hook + self._apply_handlers(section=section) + + # ------------------------------------------------------------------ + # Internals + # ------------------------------------------------------------------ + def _resolve_initial_profile(self, profile: Optional[str]) -> str: + if profile: + return normalize_profile_name(str(profile)) + + candidate = ( + os.getenv("TIDY3D_CONFIG_PROFILE") + or os.getenv("TIDY3D_PROFILE") + or os.getenv("TIDY3D_ENV") + or "default" + ) + return normalize_profile_name(candidate) + + def _reload(self) -> None: + self._env_overrides = load_environment_overrides() + self._builtin_data = deepcopy(self._loader.get_builtin_profile(self._profile)) + self._base_data = deepcopy(self._loader.load_base()) + self._profile_data = deepcopy(self._loader.load_user_profile(self._profile)) + self._raw_tree = deep_merge(self._builtin_data, self._base_data, self._profile_data) + + runtime = deepcopy(self._runtime_overrides.get(self._profile, {})) + effective = deep_merge(self._raw_tree, runtime, self._env_overrides) + 
self._effective_tree = effective + self._build_models() + + def _build_models(self) -> None: + sections = get_sections() + self._section_models.clear() + self._plugin_models.clear() + + for name, schema in sections.items(): + if name.startswith("plugins."): + plugin_name = name.split(".", 1)[1] + plugin_data = _deep_get(self._effective_tree, ("plugins", plugin_name)) or {} + try: + self._plugin_models[plugin_name] = schema(**plugin_data) + except Exception as exc: # pragma: no cover - validation guard + log.error(f"Failed to load configuration for plugin '{plugin_name}': {exc}") + raise + continue + if name == "plugins": + continue + section_data = self._effective_tree.get(name, {}) + try: + self._section_models[name] = schema(**section_data) + except Exception as exc: # pragma: no cover + log.error(f"Failed to load configuration for section '{name}': {exc}") + raise + + def _get_model(self, name: str) -> Optional[BaseModel]: + if name.startswith("plugins."): + plugin = name.split(".", 1)[1] + return self._plugin_models.get(plugin) + return self._section_models.get(name) + + def _apply_handlers(self, section: Optional[str] = None) -> None: + handlers = get_handlers() + targets = [section] if section else handlers.keys() + for target in targets: + handler = handlers.get(target) + if handler is None: + continue + model = self._get_model(target) + if model is None: + continue + try: + handler(model) + except Exception as exc: + log.error(f"Failed to apply configuration handler for '{target}': {exc}") + + def _compose_without_env(self) -> dict[str, Any]: + runtime = self._runtime_overrides.get(self._profile, {}) + return deep_merge(self._raw_tree, runtime) + + def _default_tree(self) -> dict[str, Any]: + defaults: dict[str, Any] = {} + for name, schema in get_sections().items(): + if name.startswith("plugins."): + plugin = name.split(".", 1)[1] + defaults.setdefault("plugins", {})[plugin] = _model_dict(schema()) + elif name == "plugins": + 
defaults.setdefault("plugins", {}) + else: + defaults[name] = _model_dict(schema()) + return defaults + + def _filter_persisted(self, tree: dict[str, Any]) -> dict[str, Any]: + sections = get_sections() + filtered: dict[str, Any] = {} + plugins_source = tree.get("plugins", {}) + plugin_filtered: dict[str, Any] = {} + + for name, schema in sections.items(): + if name == "plugins": + continue + if name.startswith("plugins."): + plugin_name = name.split(".", 1)[1] + plugin_data = plugins_source.get(plugin_name, {}) + if not isinstance(plugin_data, dict): + continue + persisted_plugin = _extract_persisted(schema, plugin_data) + if persisted_plugin: + plugin_filtered[plugin_name] = persisted_plugin + continue + + section_data = tree.get(name, {}) + if not isinstance(section_data, dict): + continue + persisted_section = _extract_persisted(schema, section_data) + if persisted_section: + filtered[name] = persisted_section + + if plugin_filtered: + filtered["plugins"] = plugin_filtered + return filtered + + # ------------------------------------------------------------------ + # Python protocol hooks + # ------------------------------------------------------------------ + def __getattr__(self, name: str) -> Any: + if name in self._section_models: + return SectionAccessor(self, name) + if name == "plugins": + return self.plugins + raise AttributeError(f"Config has no section '{name}'") + + def __setattr__(self, name: str, value: Any) -> None: + if name.startswith("_"): + object.__setattr__(self, name, value) + return + if name in self._section_models: + if isinstance(value, BaseModel): + payload = value.model_dump(exclude_unset=False) + else: + payload = value + self.update_section(name, **payload) + return + object.__setattr__(self, name, value) + + +# ---------------------------------------------------------------------- +# Helpers +# ---------------------------------------------------------------------- + + +def _deep_get(tree: dict[str, Any], path: Iterable[str]) -> 
Optional[dict[str, Any]]: + node: Any = tree + for segment in path: + if not isinstance(node, dict): + return None + node = node.get(segment) + if node is None: + return None + return node if isinstance(node, dict) else None + + +def _resolve_model_type(annotation: Any) -> Optional[type[BaseModel]]: + """Return the first BaseModel subclass found in an annotation (if any).""" + + if isinstance(annotation, type) and issubclass(annotation, BaseModel): + return annotation + + origin = get_origin(annotation) + if origin is None: + return None + + for arg in get_args(annotation): + nested = _resolve_model_type(arg) + if nested is not None: + return nested + return None + + +def _serialize_value(value: Any) -> Any: + if isinstance(value, BaseModel): + return value.model_dump(exclude_unset=False) + if hasattr(value, "get_secret_value"): + return value.get_secret_value() + return value + + +def _model_dict(model: BaseModel) -> dict[str, Any]: + data = model.model_dump(exclude_unset=False) + for key, value in list(data.items()): + if hasattr(value, "get_secret_value"): + data[key] = value.get_secret_value() + return data + + +def _extract_persisted(schema: type[BaseModel], data: dict[str, Any]) -> dict[str, Any]: + persisted: dict[str, Any] = {} + for field_name, field in schema.model_fields.items(): + schema_extra = field.json_schema_extra or {} + annotation = field.annotation + persist = bool(schema_extra.get("persist")) if isinstance(schema_extra, dict) else False + if not persist: + continue + if field_name not in data: + continue + value = data[field_name] + if value is None: + persisted[field_name] = None + continue + + nested_type = _resolve_model_type(annotation) + if nested_type is not None: + nested_source = value if isinstance(value, dict) else {} + nested_persisted = _extract_persisted(nested_type, nested_source) + if nested_persisted: + persisted[field_name] = nested_persisted + continue + + if hasattr(value, "get_secret_value"): + persisted[field_name] = 
value.get_secret_value() + else: + persisted[field_name] = deepcopy(value) + + return persisted + + +__all__ = [ + "ConfigManager", + "PluginsAccessor", + "ProfilesAccessor", + "SectionAccessor", + "normalize_profile_name", +] diff --git a/tidy3d/config/profiles.py b/tidy3d/config/profiles.py new file mode 100644 index 0000000000..67a219929c --- /dev/null +++ b/tidy3d/config/profiles.py @@ -0,0 +1,59 @@ +"""Built-in configuration profiles for tidy3d.""" + +from __future__ import annotations + +from typing import Any + +BUILTIN_PROFILES: dict[str, dict[str, Any]] = { + "default": { + "web": { + "api_endpoint": "https://tidy3d-api.simulation.cloud", + "website_endpoint": "https://tidy3d.simulation.cloud", + "s3_region": "us-gov-west-1", + } + }, + "prod": { + "web": { + "api_endpoint": "https://tidy3d-api.simulation.cloud", + "website_endpoint": "https://tidy3d.simulation.cloud", + "s3_region": "us-gov-west-1", + } + }, + "dev": { + "web": { + "api_endpoint": "https://tidy3d-api.dev-simulation.cloud", + "website_endpoint": "https://tidy3d.dev-simulation.cloud", + "s3_region": "us-east-1", + } + }, + "uat": { + "web": { + "api_endpoint": "https://tidy3d-api.uat-simulation.cloud", + "website_endpoint": "https://tidy3d.uat-simulation.cloud", + "s3_region": "us-west-2", + } + }, + "pre": { + "web": { + "api_endpoint": "https://preprod-tidy3d-api.simulation.cloud", + "website_endpoint": "https://preprod-tidy3d.simulation.cloud", + "s3_region": "us-gov-west-1", + } + }, + "nexus": { + "web": { + "api_endpoint": "http://127.0.0.1:5000", + "website_endpoint": "http://127.0.0.1/tidy3d", + "ssl_verify": False, + "enable_caching": False, + "s3_region": "us-east-1", + "env_vars": { + "AWS_ENDPOINT_URL_S3": "http://127.0.0.1:9000", + }, + } + }, +} + +INTERNAL_PROFILES = {key for key in BUILTIN_PROFILES if key not in {"default"}} + +__all__ = ["BUILTIN_PROFILES", "INTERNAL_PROFILES"] diff --git a/tidy3d/config/registry.py b/tidy3d/config/registry.py new file mode 100644 index 
"""Registry utilities for tidy3d configuration sections and handlers."""

from __future__ import annotations

from typing import Callable, Optional, TypeVar

from pydantic import BaseModel

T = TypeVar("T", bound=BaseModel)

# Registered section schemas, keyed by section name (e.g. "web", "plugins.x").
_SECTIONS: dict[str, type[BaseModel]] = {}
# Registered apply-handlers, keyed by section name.
_HANDLERS: dict[str, Callable[[BaseModel], None]] = {}
# The active manager, notified when sections/handlers register after startup.
_MANAGER: Optional[ConfigManagerProtocol] = None


class ConfigManagerProtocol:
    """Protocol-like interface for manager notifications."""

    def on_section_registered(self, section: str) -> None:  # pragma: no cover - interface hook
        """Called when a new section schema is registered."""

    def on_handler_registered(self, section: str) -> None:  # pragma: no cover - interface hook
        """Called when a handler is registered."""


def attach_manager(manager: ConfigManagerProtocol) -> None:
    """Attach the active configuration manager for registry callbacks."""

    global _MANAGER
    _MANAGER = manager


def register_section(name: str) -> Callable[[type[T]], type[T]]:
    """Decorator to register a configuration section schema under ``name``."""

    def _register(schema: type[T]) -> type[T]:
        _SECTIONS[name] = schema
        manager = _MANAGER
        if manager is not None:
            manager.on_section_registered(name)
        return schema

    return _register


def register_plugin(name: str) -> Callable[[type[T]], type[T]]:
    """Decorator to register a plugin schema (namespaced under ``plugins.``)."""

    return register_section(f"plugins.{name}")


def register_handler(
    name: str,
) -> Callable[[Callable[[BaseModel], None]], Callable[[BaseModel], None]]:
    """Decorator to register a handler applied whenever section ``name`` reloads."""

    def _register(handler: Callable[[BaseModel], None]) -> Callable[[BaseModel], None]:
        _HANDLERS[name] = handler
        manager = _MANAGER
        if manager is not None:
            manager.on_handler_registered(name)
        return handler

    return _register
schemas.""" + + return dict(_SECTIONS) + + +def get_handlers() -> dict[str, Callable[[BaseModel], None]]: + """Return registered configuration handlers.""" + + return dict(_HANDLERS) diff --git a/tidy3d/config/sections.py b/tidy3d/config/sections.py new file mode 100644 index 0000000000..280141d49c --- /dev/null +++ b/tidy3d/config/sections.py @@ -0,0 +1,352 @@ +"""Built-in configuration section schemas and handlers.""" + +from __future__ import annotations + +import os +import ssl +from typing import Any, Literal, Optional +from urllib.parse import urlparse + +import numpy as np +from pydantic import BaseModel, ConfigDict, Field, PositiveInt, SecretStr, field_validator + +from tidy3d.log import DEFAULT_LEVEL, LogLevel, set_log_suppression, set_logging_level +from tidy3d.packaging import set_use_local_subpixel + +from .registry import register_handler, register_section + + +class ConfigSection(BaseModel): + """Base class for configuration sections.""" + + model_config = ConfigDict(extra="forbid", validate_assignment=True) + + def to_dict(self, *, mask_secrets: bool = True) -> dict[str, Any]: + """Convert section to a serializable dictionary.""" + + data = self.model_dump(exclude_unset=True) + if mask_secrets: + return data + + unmasked: dict[str, Any] = {} + for key, value in data.items(): + if isinstance(value, SecretStr): + unmasked[key] = value.get_secret_value() + else: + unmasked[key] = value + return unmasked + + +@register_section("logging") +class LoggingConfig(ConfigSection): + """Logging configuration.""" + + level: LogLevel = Field( + DEFAULT_LEVEL, + title="Logging level", + description="Lowest logging level that will be emitted.", + json_schema_extra={"persist": True}, + ) + + suppression: bool = Field( + True, + title="Log suppression", + description="Suppress repeated log messages when True.", + ) + + +@register_handler("logging") +def apply_logging(config: LoggingConfig) -> None: + """Apply logging configuration globally.""" + + 
set_logging_level(config.level) + set_log_suppression(config.suppression) + + +@register_section("simulation") +class SimulationConfig(ConfigSection): + """Simulation-related configuration.""" + + use_local_subpixel: Optional[bool] = Field( + None, + title="Use local subpixel", + description=( + "If True, force local subpixel averaging; False disables it; None keeps default behavior." + ), + ) + + +@register_handler("simulation") +def apply_simulation(config: SimulationConfig) -> None: + """Apply simulation configuration hooks.""" + + if config.use_local_subpixel is not None: + set_use_local_subpixel(config.use_local_subpixel) + + +@register_section("autograd") +class AutogradConfig(ConfigSection): + """Autograd configuration section.""" + + min_wvl_fraction: float = Field( + 5e-2, + title="Minimum wavelength fraction", + description=( + "Minimum fraction of the smallest free-space wavelength used when discretizing " + "cylindrical structures for autograd derivatives." + ), + ge=0.0, + ) + + points_per_wavelength: PositiveInt = Field( + 10, + title="Points per wavelength", + description=( + "Default number of material sample points per wavelength when discretizing " + "cylinders for autograd derivatives." + ), + ) + + default_wavelength_fraction: float = Field( + 0.1, + title="Default wavelength fraction", + description=( + "Fallback fraction of the minimum wavelength used when autograd needs to " + "estimate adaptive spacing." + ), + ge=0.0, + ) + + minimum_spacing_fraction: float = Field( + 1e-2, + title="Minimum spacing fraction", + description=( + "Minimum normalized spacing allowed when constructing adaptive finite-difference " + "stencils for autograd evaluations." 
+ ), + ge=0.0, + ) + + gradient_precision: Literal["single", "double"] = Field( + "single", + title="Gradient precision", + description="Floating-point precision used for autograd gradient calculations.", + json_schema_extra={"persist": True}, + ) + + monitor_interval_poly: tuple[int, int, int] = Field( + (1, 1, 1), + title="Polynomial monitor spacing", + description=( + "Default spatial interval (in cells) between samples for polynomial autograd monitors." + ), + ) + + monitor_interval_custom: tuple[int, int, int] = Field( + (1, 1, 1), + title="Custom monitor spacing", + description=( + "Default spatial interval (in cells) between samples for custom autograd monitors." + ), + ) + + quadrature_sample_fraction: float = Field( + 0.4, + title="Quadrature sample fraction", + description=( + "Fraction of uniform samples reused when building Gauss quadrature nodes for " + "autograd surface integrations." + ), + ge=0.0, + le=1.0, + ) + + gauss_quadrature_order: PositiveInt = Field( + 7, + title="Gauss quadrature order", + description=( + "Maximum Gauss-Legendre order used when constructing composite quadrature rules " + "for autograd surface integrations." + ), + ) + + edge_clip_tolerance: float = Field( + 1e-9, + title="Edge clipping tolerance", + description=( + "Padding tolerance applied when clipping polygon edges against simulation bounds " + "in autograd surface integrations." + ), + ge=0.0, + ) + + solver_freq_chunk_size: Optional[PositiveInt] = Field( + None, + title="Adjoint frequency chunk size", + description=( + "Maximum number of frequencies to process per chunk during adjoint gradient " + "evaluation. Use `None` to disable chunking." 
+ ), + ) + + max_traced_structures: PositiveInt = Field( + 500, + title="Max traced structures", + description="Maximum number of structures that can have traced fields in an autograd run.", + json_schema_extra={"persist": True}, + ) + + max_adjoint_per_fwd: PositiveInt = Field( + 10, + title="Max adjoint solves per forward", + description="Maximum number of adjoint simulations dispatched per forward solve.", + json_schema_extra={"persist": True}, + ) + + @property + def gradient_dtype_float(self) -> np.dtype: + """Floating-point dtype implied by ``gradient_precision``.""" + + return np.float64 if self.gradient_precision == "double" else np.float32 + + @property + def gradient_dtype_complex(self) -> np.dtype: + """Complex dtype implied by ``gradient_precision``.""" + + return np.complex128 if self.gradient_precision == "double" else np.complex64 + + +_PREVIOUS_WEB_ENV: dict[str, Optional[str]] = {} + + +@register_section("web") +class WebConfig(ConfigSection): + """Web/HTTP configuration.""" + + model_config = ConfigDict( + json_encoders={ssl.TLSVersion: (lambda value: value.value if value else None)} + ) + + apikey: Optional[SecretStr] = Field( + None, + title="API key", + description="Tidy3D API key.", + json_schema_extra={"persist": True}, + ) + + ssl_verify: bool = Field( + True, + title="SSL verification", + description="Verify SSL certificates for API requests.", + ) + + enable_caching: bool = Field( + True, + title="Enable server-side caching", + description="Allow the web service to return cached simulation results.", + json_schema_extra={"persist": True}, + ) + + api_endpoint: str = Field( + "https://tidy3d-api.simulation.cloud", + title="API endpoint", + description="Tidy3D API base URL.", + ) + + website_endpoint: str = Field( + "https://tidy3d.simulation.cloud", + title="Website endpoint", + description="Tidy3D website URL.", + ) + + s3_region: str = Field( + "us-gov-west-1", + title="S3 region", + description="AWS S3 region used by the platform.", + ) 
+ + timeout: int = Field( + 120, + title="HTTP timeout", + description="HTTP request timeout in seconds.", + ge=0, + le=300, + ) + + ssl_version: Optional[ssl.TLSVersion] = Field( + None, + title="SSL/TLS version", + description="Optional SSL/TLS version to enforce for requests.", + ) + + env_vars: dict[str, str] = Field( + default_factory=dict, + title="Environment variable overrides", + description="Environment variables to export when this config is applied.", + ) + + def to_dict(self, *, mask_secrets: bool = True) -> dict[str, Any]: + data = super().to_dict(mask_secrets=mask_secrets) + if mask_secrets: + if isinstance(data.get("apikey"), SecretStr): + data["apikey"] = None + else: + secret = data.get("apikey") + if isinstance(secret, SecretStr): + data["apikey"] = secret.get_secret_value() + for field in ("api_endpoint", "website_endpoint"): + if field in data and data[field] is not None: + data[field] = str(data[field]) + return data + + @field_validator("api_endpoint", "website_endpoint", mode="before") + @classmethod + def _validate_http_url(cls, value: Any) -> str: + if value is None: + return value + parsed = urlparse(str(value)) + if parsed.scheme not in {"http", "https"} or not parsed.netloc: + raise ValueError("Value must be an HTTP or HTTPS URL") + normalized = parsed.geturl() + if ( + parsed.path in {"", "/"} + and not parsed.params + and not parsed.query + and not parsed.fragment + ): + normalized = normalized.rstrip("/") + return normalized + + +@register_handler("web") +def apply_web(config: WebConfig) -> None: + """Apply web-related environment variable overrides.""" + + # restore previous values + for key, previous in _PREVIOUS_WEB_ENV.items(): + if previous is None: + os.environ.pop(key, None) + else: + os.environ[key] = previous + _PREVIOUS_WEB_ENV.clear() + + if config.env_vars: + for key, value in config.env_vars.items(): + _PREVIOUS_WEB_ENV[key] = os.environ.get(key) + os.environ[key] = value + + +@register_section("plugins") +class 
PluginsContainer(ConfigSection): + """Container that holds plugin-specific configuration sections.""" + + model_config = ConfigDict(extra="allow") + + +__all__ = [ + "AutogradConfig", + "LoggingConfig", + "PluginsContainer", + "SimulationConfig", + "WebConfig", +] diff --git a/tidy3d/config/serializer.py b/tidy3d/config/serializer.py new file mode 100644 index 0000000000..5d6e60b2cc --- /dev/null +++ b/tidy3d/config/serializer.py @@ -0,0 +1,145 @@ +from __future__ import annotations + +from collections.abc import Iterable +from typing import Any, get_args, get_origin + +import tomlkit +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from tomlkit.items import Item, Table + +from .registry import get_sections + +Path = tuple[str, ...] + + +def collect_descriptions() -> dict[Path, str]: + """Collect description strings for registered configuration fields.""" + + descriptions: dict[Path, str] = {} + for section_name, model in get_sections().items(): + base_path = tuple(segment for segment in section_name.split(".") if segment) + section_doc = (model.__doc__ or "").strip() + if section_doc and base_path: + descriptions[base_path] = descriptions.get( + base_path, section_doc.splitlines()[0].strip() + ) + for field_name, field in model.model_fields.items(): + descriptions.update(_describe_field(field, prefix=(*base_path, field_name))) + return descriptions + + +def _describe_field(field: FieldInfo, prefix: Path) -> dict[Path, str]: + descriptions: dict[Path, str] = {} + description = (field.description or "").strip() + if description: + descriptions[prefix] = description + + nested_models: Iterable[type[BaseModel]] = _iter_model_types(field.annotation) + for model in nested_models: + nested_doc = (model.__doc__ or "").strip() + if nested_doc: + descriptions[prefix] = descriptions.get(prefix, nested_doc.splitlines()[0].strip()) + for sub_name, sub_field in model.model_fields.items(): + descriptions.update(_describe_field(sub_field, prefix=(*prefix, 
sub_name))) + return descriptions + + +def _iter_model_types(annotation: Any) -> Iterable[type[BaseModel]]: + """Yield BaseModel subclasses referenced by a field annotation (if any).""" + + if annotation is None: + return + + stack = [annotation] + seen: set[type[BaseModel]] = set() + + while stack: + current = stack.pop() + if isinstance(current, type) and issubclass(current, BaseModel): + if current not in seen: + seen.add(current) + yield current + continue + + origin = get_origin(current) + if origin is None: + continue + + stack.extend(get_args(current)) + + +def build_document( + data: dict[str, Any], + existing: tomlkit.TOMLDocument | None, + descriptions: dict[Path, str] | None = None, +) -> tomlkit.TOMLDocument: + """Return a TOML document populated with data and annotated comments.""" + + descriptions = descriptions or collect_descriptions() + document = existing if existing is not None else tomlkit.document() + _prune_missing_keys(document, data.keys()) + for key, value in data.items(): + _apply_value( + container=document, + key=key, + value=value, + path=(key,), + descriptions=descriptions, + is_new=key not in document, + ) + return document + + +def _prune_missing_keys(container: Table | tomlkit.TOMLDocument, keys: Iterable[str]) -> None: + desired = set(keys) + for existing_key in list(container.keys()): + if existing_key not in desired: + del container[existing_key] + + +def _apply_value( + container: Table | tomlkit.TOMLDocument, + key: str, + value: Any, + path: Path, + descriptions: dict[Path, str], + is_new: bool, +) -> None: + description = descriptions.get(path) + if isinstance(value, dict): + existing = container.get(key) + table = existing if isinstance(existing, Table) else tomlkit.table() + _prune_missing_keys(table, value.keys()) + for sub_key, sub_value in value.items(): + _apply_value( + container=table, + key=sub_key, + value=sub_value, + path=(*path, sub_key), + descriptions=descriptions, + is_new=not isinstance(existing, Table) or 
sub_key not in table, + ) + if key in container: + container[key] = table + else: + if description: + container.add(tomlkit.comment(description)) + container.add(key, table) + return + + if value is None: + return + + existing_item = container.get(key) + new_item = tomlkit.item(value) + if isinstance(existing_item, Item): + new_item.trivia.comment = existing_item.trivia.comment + new_item.trivia.comment_ws = existing_item.trivia.comment_ws + elif description: + new_item.comment(description) + + if key in container: + container[key] = new_item + else: + container.add(key, new_item) diff --git a/tidy3d/packaging.py b/tidy3d/packaging.py index 930ff36524..36d6351f06 100644 --- a/tidy3d/packaging.py +++ b/tidy3d/packaging.py @@ -12,7 +12,6 @@ import numpy as np -from .config import config from .exceptions import Tidy3dImportError from .log import log from .version import __version__ @@ -26,6 +25,7 @@ } tidy3d_extras = {"mod": None, "use_local_subpixel": None} +_CONFIG_LOCAL_SUBPIXEL: bool | None = None def check_import(module_name: str) -> bool: @@ -182,13 +182,20 @@ def get_numpy_major_version(module=np): return major_version +def set_use_local_subpixel(value: bool | None) -> None: + """Set the preferred use of local subpixel averaging.""" + + global _CONFIG_LOCAL_SUBPIXEL + _CONFIG_LOCAL_SUBPIXEL = value + + def supports_local_subpixel(fn): """When decorating a method, checks that 'tidy3d-extras' is available, conditioned on 'config.use_local_subpixel'.""" @functools.wraps(fn) def _fn(*args, **kwargs): - if config.use_local_subpixel is False: + if _CONFIG_LOCAL_SUBPIXEL is False: tidy3d_extras["use_local_subpixel"] = False tidy3d_extras["mod"] = None else: @@ -200,7 +207,7 @@ def _fn(*args, **kwargs): except ImportError as exc: tidy3d_extras["mod"] = None tidy3d_extras["use_local_subpixel"] = False - if config.use_local_subpixel is True: + if _CONFIG_LOCAL_SUBPIXEL is True: raise Tidy3dImportError( "The package 'tidy3d-extras' is required for this " "operation when 
'config.use_local_subpixel' is 'True'. " @@ -242,10 +249,11 @@ def disable_local_subpixel(fn): @functools.wraps(fn) def _fn(*args, **kwargs): - use_local_subpixel = config.use_local_subpixel - config.use_local_subpixel = False + global _CONFIG_LOCAL_SUBPIXEL + use_local_subpixel = _CONFIG_LOCAL_SUBPIXEL + _CONFIG_LOCAL_SUBPIXEL = False result = fn(*args, **kwargs) - config.use_local_subpixel = use_local_subpixel + _CONFIG_LOCAL_SUBPIXEL = use_local_subpixel return result return _fn diff --git a/tidy3d/plugins/smatrix/component_modelers/base.py b/tidy3d/plugins/smatrix/component_modelers/base.py index 3d13f8646a..48878afcfe 100644 --- a/tidy3d/plugins/smatrix/component_modelers/base.py +++ b/tidy3d/plugins/smatrix/component_modelers/base.py @@ -7,7 +7,6 @@ import pydantic.v1 as pd -from tidy3d.components.autograd.constants import MAX_NUM_ADJOINT_PER_FWD from tidy3d.components.base import Tidy3dBaseModel, cached_property from tidy3d.components.geometry.utils import _shift_value_signed from tidy3d.components.simulation import Simulation @@ -18,6 +17,7 @@ validate_freqs_not_empty, validate_freqs_unique, ) +from tidy3d.config import config from tidy3d.constants import HERTZ from tidy3d.exceptions import SetupError, Tidy3dKeyError from tidy3d.log import log @@ -262,7 +262,7 @@ def run( pay_type: Union[PayType, str] = "AUTO", priority: Optional[int] = None, local_gradient: bool = False, - max_num_adjoint_per_fwd: int = MAX_NUM_ADJOINT_PER_FWD, + max_num_adjoint_per_fwd: Optional[int] = None, ): log.warning( "'ComponentModeler.run()' is deprecated and will be removed in a future release. 
" @@ -272,6 +272,9 @@ def run( ) from tidy3d.plugins.smatrix.run import _run_local + if max_num_adjoint_per_fwd is None: + max_num_adjoint_per_fwd = config.autograd.max_adjoint_per_fwd + data = _run_local( self, path_dir=path_dir, diff --git a/tidy3d/web/api/autograd/autograd.py b/tidy3d/web/api/autograd/autograd.py index 7958dc57c9..8b7f7e14bb 100644 --- a/tidy3d/web/api/autograd/autograd.py +++ b/tidy3d/web/api/autograd/autograd.py @@ -11,10 +11,6 @@ import tidy3d as td from tidy3d.components.autograd import AutogradFieldMap -from tidy3d.components.autograd.constants import ( - MAX_NUM_ADJOINT_PER_FWD, - MAX_NUM_TRACED_STRUCTURES, -) from tidy3d.components.types.workflow import WorkflowDataType, WorkflowType from tidy3d.exceptions import AdjointError from tidy3d.web.api.asynchronous import DEFAULT_DATA_DIR @@ -59,6 +55,26 @@ _INSPECT_ADJOINT_PLANE = td.Box(center=(0, 0, 0), size=(td.inf, td.inf, 0)) +def _max_traced_structures() -> int: + from tidy3d.config import config + + return config.autograd.max_traced_structures + + +def _max_adjoint_per_fwd() -> int: + from tidy3d.config import config + + return config.autograd.max_adjoint_per_fwd + + +def get_max_traced_structures() -> int: + return _max_traced_structures() + + +def get_max_adjoint_per_fwd() -> int: + return _max_adjoint_per_fwd() + + def is_valid_for_autograd(simulation: td.Simulation) -> bool: """Check whether a supplied Simulation can use the autograd path.""" if not isinstance(simulation, td.Simulation): @@ -81,9 +97,10 @@ def is_valid_for_autograd(simulation: td.Simulation) -> bool: # if too many structures, raise an error structure_indices = {i for key, i, *_ in traced_fields.keys() if key == "structures"} num_traced_structures = len(structure_indices) - if num_traced_structures > MAX_NUM_TRACED_STRUCTURES: + max_structures = _max_traced_structures() + if num_traced_structures > max_structures: raise AdjointError( - f"Autograd support is currently limited to {MAX_NUM_TRACED_STRUCTURES} structures 
with " + f"Autograd support is currently limited to {max_structures} structures with " f"traced fields. Found {num_traced_structures} structures with traced fields." ) @@ -113,7 +130,7 @@ def run( simulation_type: str = "tidy3d", parent_tasks: typing.Optional[list[str]] = None, local_gradient: bool = LOCAL_GRADIENT, - max_num_adjoint_per_fwd: int = MAX_NUM_ADJOINT_PER_FWD, + max_num_adjoint_per_fwd: typing.Optional[int] = None, reduce_simulation: typing.Literal["auto", True, False] = "auto", pay_type: typing.Union[PayType, str] = PayType.AUTO, priority: typing.Optional[int] = None, @@ -150,8 +167,8 @@ def run( local_gradient: bool = False Whether to perform gradient calculation locally, requiring more downloads but potentially more stable with experimental features. - max_num_adjoint_per_fwd: int = 10 - Maximum number of adjoint simulations allowed to run automatically. + max_num_adjoint_per_fwd: typing.Optional[int] = None + Maximum number of adjoint simulations allowed to run automatically. Uses the autograd configuration when None. reduce_simulation: Literal["auto", True, False] = "auto" Whether to reduce structures in the simulation to the simulation domain only. Note: currently only implemented for the mode solver. pay_type: typing.Union[PayType, str] = PayType.AUTO @@ -202,6 +219,9 @@ def run( :meth:`tidy3d.web.api.container.Batch.monitor` Monitor progress of each of the running tasks. 
""" + if max_num_adjoint_per_fwd is None: + max_num_adjoint_per_fwd = _max_adjoint_per_fwd() + if priority is not None and (priority < 1 or priority > 10): raise ValueError("Priority must be between '1' and '10' if specified.") @@ -280,7 +300,7 @@ def run_async( solver_version: typing.Optional[str] = None, parent_tasks: typing.Optional[dict[str, list[str]]] = None, local_gradient: bool = LOCAL_GRADIENT, - max_num_adjoint_per_fwd: int = MAX_NUM_ADJOINT_PER_FWD, + max_num_adjoint_per_fwd: typing.Optional[int] = None, reduce_simulation: typing.Literal["auto", True, False] = "auto", pay_type: typing.Union[PayType, str] = PayType.AUTO, priority: typing.Optional[int] = None, @@ -312,8 +332,8 @@ def run_async( local_gradient: bool = False Whether to perform gradient calculations locally, requiring more downloads but potentially more stable with experimental features. - max_num_adjoint_per_fwd: int = 10 - Maximum number of adjoint simulations allowed to run automatically. + max_num_adjoint_per_fwd: typing.Optional[int] = None + Maximum number of adjoint simulations allowed to run automatically. Uses the autograd configuration when None. reduce_simulation: Literal["auto", True, False] = "auto" Whether to reduce structures in the simulation to the simulation domain only. Note: currently only implemented for the mode solver. 
pay_type: typing.Union[PayType, str] = PayType.AUTO @@ -338,6 +358,9 @@ def run_async( if priority is not None and (priority < 1 or priority > 10): raise ValueError("Priority must be between '1' and '10' if specified.") + if max_num_adjoint_per_fwd is None: + max_num_adjoint_per_fwd = _max_adjoint_per_fwd() + if isinstance(simulations, (tuple, list)): sim_dict = {} for i, sim in enumerate(simulations, 1): @@ -385,7 +408,7 @@ def _run( simulation: td.Simulation, task_name: str, local_gradient: bool = LOCAL_GRADIENT, - max_num_adjoint_per_fwd: int = MAX_NUM_ADJOINT_PER_FWD, + max_num_adjoint_per_fwd: typing.Optional[int] = None, **run_kwargs, ) -> td.SimulationData: """User-facing ``web.run`` function, compatible with ``autograd`` differentiation.""" @@ -423,7 +446,7 @@ def _run( def _run_async( simulations: dict[str, td.Simulation], local_gradient: bool = LOCAL_GRADIENT, - max_num_adjoint_per_fwd: int = MAX_NUM_ADJOINT_PER_FWD, + max_num_adjoint_per_fwd: typing.Optional[int] = None, **run_async_kwargs, ) -> dict[str, td.SimulationData]: """User-facing ``web.run_async`` function, compatible with ``autograd`` differentiation.""" @@ -984,3 +1007,11 @@ def _run_async_tidy3d_bwd( ) -> dict[str, AutogradFieldMap]: """Run a batch of adjoint simulations via engine wrapper (delegated).""" return _run_async_tidy3d_bwd_engine(simulations=simulations, **run_kwargs) + + +def __getattr__(name: str): + if name == "MAX_NUM_TRACED_STRUCTURES": + return _max_traced_structures() + if name == "MAX_NUM_ADJOINT_PER_FWD": + return _max_adjoint_per_fwd() + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/tidy3d/web/api/autograd/backward.py b/tidy3d/web/api/autograd/backward.py index 799f4dd95f..e8f46c1e54 100644 --- a/tidy3d/web/api/autograd/backward.py +++ b/tidy3d/web/api/autograd/backward.py @@ -7,8 +7,8 @@ import tidy3d as td from tidy3d.components.autograd import AutogradFieldMap, get_static -from tidy3d.components.autograd.constants import 
ADJOINT_FREQ_CHUNK_SIZE from tidy3d.components.autograd.derivative_utils import DerivativeInfo +from tidy3d.config import config from tidy3d.exceptions import AdjointError from .utils import E_to_D, get_derivative_maps @@ -214,10 +214,12 @@ def postprocess_adj( bounds_intersect = (rmin_intersect, rmax_intersect) # get chunk size - if None, process all frequencies as one chunk - freq_chunk_size = ADJOINT_FREQ_CHUNK_SIZE + freq_chunk_size = config.autograd.solver_freq_chunk_size n_freqs = len(adjoint_frequencies) - if freq_chunk_size is None: + if not freq_chunk_size or freq_chunk_size <= 0: freq_chunk_size = n_freqs + else: + freq_chunk_size = min(freq_chunk_size, n_freqs) # process in chunks vjp_value_map = {} diff --git a/tidy3d/web/api/autograd/ops_backward.py b/tidy3d/web/api/autograd/ops_backward.py index 799f4dd95f..e8f46c1e54 100644 --- a/tidy3d/web/api/autograd/ops_backward.py +++ b/tidy3d/web/api/autograd/ops_backward.py @@ -7,8 +7,8 @@ import tidy3d as td from tidy3d.components.autograd import AutogradFieldMap, get_static -from tidy3d.components.autograd.constants import ADJOINT_FREQ_CHUNK_SIZE from tidy3d.components.autograd.derivative_utils import DerivativeInfo +from tidy3d.config import config from tidy3d.exceptions import AdjointError from .utils import E_to_D, get_derivative_maps @@ -214,10 +214,12 @@ def postprocess_adj( bounds_intersect = (rmin_intersect, rmax_intersect) # get chunk size - if None, process all frequencies as one chunk - freq_chunk_size = ADJOINT_FREQ_CHUNK_SIZE + freq_chunk_size = config.autograd.solver_freq_chunk_size n_freqs = len(adjoint_frequencies) - if freq_chunk_size is None: + if not freq_chunk_size or freq_chunk_size <= 0: freq_chunk_size = n_freqs + else: + freq_chunk_size = min(freq_chunk_size, n_freqs) # process in chunks vjp_value_map = {} diff --git a/tidy3d/web/cli/app.py b/tidy3d/web/cli/app.py index 76a9572f09..5388a5fb86 100644 --- a/tidy3d/web/cli/app.py +++ b/tidy3d/web/cli/app.py @@ -10,11 +10,11 @@ import 
click import requests -import toml -from tidy3d.web.cli.constants import CONFIG_FILE, CREDENTIAL_FILE, TIDY3D_DIR +from tidy3d.config import config, get_manager +from tidy3d.web.cli.constants import CREDENTIAL_FILE, TIDY3D_DIR from tidy3d.web.cli.migrate import migrate -from tidy3d.web.core.constants import HEADER_APIKEY, KEY_APIKEY +from tidy3d.web.core.constants import HEADER_APIKEY from tidy3d.web.core.environment import Env from .develop.index import develop @@ -31,12 +31,15 @@ def get_description(): The description for the config command. """ - if os.path.exists(CONFIG_FILE): - with open(CONFIG_FILE, encoding="utf-8") as f: - content = f.read() - config = toml.loads(content) - return config.get(KEY_APIKEY, "") - return "" + try: + apikey = config.web.apikey + except AttributeError: + return "" + if apikey is None: + return "" + if hasattr(apikey, "get_secret_value"): + return apikey.get_secret_value() + return str(apikey) @click.group() @@ -106,10 +109,8 @@ def auth(req): if resp.status_code == 200: click.echo("Configured successfully.") - with open(CONFIG_FILE, "w+", encoding="utf-8") as config_file: - toml_config = toml.loads(config_file.read()) - toml_config.update({KEY_APIKEY: apikey}) - config_file.write(toml.dumps(toml_config)) + config.update_section("web", apikey=apikey) + config.save() else: click.echo("API key is invalid.") @@ -132,7 +133,29 @@ def convert(lsf_file, new_file): ) +@click.command("config-reset") +@click.option("--yes", is_flag=True, help="Do not prompt before resetting the configuration.") +@click.option( + "--preserve-profiles", + is_flag=True, + help="Keep user profile overrides instead of deleting them.", +) +def config_reset(yes: bool, preserve_profiles: bool) -> None: + """Reset tidy3d configuration files to the default annotated state.""" + + if not yes: + message = "Reset configuration to defaults?" + if not preserve_profiles: + message += " This will delete user profiles." 
+ click.confirm(message, abort=True) + + manager = get_manager() + manager.reset_to_defaults(include_profiles=not preserve_profiles) + click.echo("Configuration reset to defaults.") + + tidy3d_cli.add_command(configure) tidy3d_cli.add_command(migration) tidy3d_cli.add_command(convert) tidy3d_cli.add_command(develop) +tidy3d_cli.add_command(config_reset) diff --git a/tidy3d/web/cli/constants.py b/tidy3d/web/cli/constants.py index 60961f28fe..78c9e9ac19 100644 --- a/tidy3d/web/cli/constants.py +++ b/tidy3d/web/cli/constants.py @@ -2,15 +2,10 @@ from __future__ import annotations -import os -from os.path import expanduser +from tidy3d.config.loader import resolve_config_directory -TIDY3D_BASE_DIR = os.getenv("TIDY3D_BASE_DIR", f"{expanduser('~')}") +_CONFIG_ROOT = resolve_config_directory() -if os.access(TIDY3D_BASE_DIR, os.W_OK): - TIDY3D_DIR = f"{TIDY3D_BASE_DIR}/.tidy3d" -else: - TIDY3D_DIR = "/tmp/.tidy3d" - -CONFIG_FILE = TIDY3D_DIR + "/config" -CREDENTIAL_FILE = TIDY3D_DIR + "/auth.json" +TIDY3D_DIR = str(_CONFIG_ROOT) +CONFIG_FILE = str(_CONFIG_ROOT / "config.toml") +CREDENTIAL_FILE = str(_CONFIG_ROOT / "auth.json") diff --git a/tidy3d/web/cli/migrate.py b/tidy3d/web/cli/migrate.py index 17fbd3c7ed..8a6348a60f 100644 --- a/tidy3d/web/cli/migrate.py +++ b/tidy3d/web/cli/migrate.py @@ -7,12 +7,12 @@ import click import requests -import toml -from tidy3d.web.core.constants import HEADER_APPLICATION, HEADER_APPLICATION_VALUE, KEY_APIKEY +from tidy3d.config import config +from tidy3d.web.core.constants import HEADER_APPLICATION, HEADER_APPLICATION_VALUE from tidy3d.web.core.environment import Env -from .constants import CONFIG_FILE, CREDENTIAL_FILE, TIDY3D_DIR +from .constants import CREDENTIAL_FILE, TIDY3D_DIR def migrate() -> bool: @@ -61,10 +61,8 @@ def migrate() -> bool: apikey = resp.json()["data"] if not os.path.exists(TIDY3D_DIR): os.mkdir(TIDY3D_DIR) - with open(CONFIG_FILE, "w+", encoding="utf-8") as config_file: - toml_config = 
toml.loads(config_file.read()) - toml_config.update({KEY_APIKEY: apikey}) - config_file.write(toml.dumps(toml_config)) + config.update_section("web", apikey=apikey) + config.save() # rename auth.json to auth.json.bak os.rename(CREDENTIAL_FILE, CREDENTIAL_FILE + ".bak") diff --git a/tidy3d/web/core/environment.py b/tidy3d/web/core/environment.py index 261f53ea93..db4363902b 100644 --- a/tidy3d/web/core/environment.py +++ b/tidy3d/web/core/environment.py @@ -1,241 +1,7 @@ -"""Environment Setup.""" +"""Legacy re-export of configuration environment helpers.""" from __future__ import annotations -import os -import ssl -from typing import Optional +from tidy3d.config import Env, Environment, EnvironmentConfig -from pydantic.v1 import BaseSettings, Field - -from .core_config import get_logger - - -class EnvironmentConfig(BaseSettings): - """Basic Configuration for definition environment.""" - - def __hash__(self): - return hash((type(self), *tuple(self.__dict__.values()))) - - name: str - web_api_endpoint: str - website_endpoint: str - s3_region: str - ssl_verify: bool = Field(True, env="TIDY3D_SSL_VERIFY") - enable_caching: Optional[bool] = None - ssl_version: Optional[ssl.TLSVersion] = None - env_vars: Optional[dict[str, str]] = None - - def active(self) -> None: - """Activate the environment instance.""" - Env.set_current(self) - - def get_real_url(self, path: str) -> str: - """Get the real url for the environment instance. - - Parameters - ---------- - path : str - Base path to append to web api endpoint. - - Returns - ------- - str - Full url for the webapi. 
- """ - return "/".join([self.web_api_endpoint, path]) - - -dev = EnvironmentConfig( - name="dev", - s3_region="us-east-1", - web_api_endpoint="https://tidy3d-api.dev-simulation.cloud", - website_endpoint="https://tidy3d.dev-simulation.cloud", -) - -uat = EnvironmentConfig( - name="uat", - s3_region="us-west-2", - web_api_endpoint="https://tidy3d-api.uat-simulation.cloud", - website_endpoint="https://tidy3d.uat-simulation.cloud", -) - -pre = EnvironmentConfig( - name="pre", - s3_region="us-gov-west-1", - web_api_endpoint="https://preprod-tidy3d-api.simulation.cloud", - website_endpoint="https://preprod-tidy3d.simulation.cloud", -) - -prod = EnvironmentConfig( - name="prod", - s3_region="us-gov-west-1", - web_api_endpoint="https://tidy3d-api.simulation.cloud", - website_endpoint="https://tidy3d.simulation.cloud", -) - - -nexus = EnvironmentConfig( - name="nexus", - web_api_endpoint="http://127.0.0.1:5000", - ssl_verify=False, - enable_caching=False, - s3_region="us-east-1", - website_endpoint="http://127.0.0.1/tidy3d", - env_vars={"AWS_ENDPOINT_URL_S3": "http://127.0.0.1:9000"}, -) - - -class Environment: - """Environment decorator for user interactive. - - Example - ------- - >>> from tidy3d.web.core.environment import Env - >>> Env.dev.active() - >>> assert Env.current.name == "dev" - ... - """ - - env_map = { - "dev": dev, - "uat": uat, - "prod": prod, - "nexus": nexus, - } - - def __init__(self): - log = get_logger() - """Initialize the environment.""" - self._previous_env_vars = {} - env_key = os.environ.get("TIDY3D_ENV") - env_key = env_key.lower() if env_key else env_key - log.info(f"env_key is {env_key}") - if not env_key: - self._current = prod - elif env_key in self.env_map: - self._current = self.env_map[env_key] - else: - log.warning( - f"The value '{env_key}' for the environment variable TIDY3D_ENV is not supported. " - f"Using prod as default." 
- ) - self._current = prod - - if self._current.env_vars: - for key, value in self._current.env_vars.items(): - self._previous_env_vars[key] = os.environ.get(key) - os.environ[key] = value - - @property - def current(self) -> EnvironmentConfig: - """Get the current environment. - - Returns - ------- - EnvironmentConfig - The config for the current environment. - """ - return self._current - - @property - def dev(self) -> EnvironmentConfig: - """Get the dev environment. - - Returns - ------- - EnvironmentConfig - The config for the dev environment. - """ - return dev - - @property - def uat(self) -> EnvironmentConfig: - """Get the uat environment. - - Returns - ------- - EnvironmentConfig - The config for the uat environment. - """ - return uat - - @property - def pre(self) -> EnvironmentConfig: - """Get the preprod environment. - - Returns - ------- - EnvironmentConfig - The config for the preprod environment. - """ - return pre - - @property - def prod(self) -> EnvironmentConfig: - """Get the prod environment. - - Returns - ------- - EnvironmentConfig - The config for the prod environment. - """ - return prod - - @property - def nexus(self) -> EnvironmentConfig: - """Get the nexus environment. - - Returns - ------- - EnvironmentConfig - The config for the nexus environment. - """ - return nexus - - def set_current(self, config: EnvironmentConfig) -> None: - """Set the current environment. - - Parameters - ---------- - config : EnvironmentConfig - The environment to set to current. 
- """ - for key, value in self._previous_env_vars.items(): - if value is None: - if key in os.environ: - del os.environ[key] - else: - os.environ[key] = value - self._previous_env_vars = {} - - if config.env_vars: - for key, value in config.env_vars.items(): - self._previous_env_vars[key] = os.environ.get(key) - os.environ[key] = value - - self._current = config - - def enable_caching(self, enable_caching: bool = True) -> None: - """Set the environment configuration setting with regards to caching simulation results. - - Parameters - ---------- - enable_caching: bool = True - If ``True``, do duplicate checking. Return the previous simulation result if duplicate simulation is found. - If ``False``, do not duplicate checking. Just run the task directly. - """ - self._current.enable_caching = enable_caching - - def set_ssl_version(self, ssl_version: ssl.TLSVersion) -> None: - """Set the ssl version. - - Parameters - ---------- - ssl_version : ssl.TLSVersion - The ssl version to set. - """ - self._current.ssl_version = ssl_version - - -Env = Environment() +__all__ = ["Env", "Environment", "EnvironmentConfig"] diff --git a/tidy3d/web/core/http_util.py b/tidy3d/web/core/http_util.py index f802752bb1..d69198de43 100644 --- a/tidy3d/web/core/http_util.py +++ b/tidy3d/web/core/http_util.py @@ -6,13 +6,13 @@ import os from enum import Enum from functools import wraps -from os.path import expanduser import requests -import toml from requests.adapters import HTTPAdapter from urllib3.util.ssl_ import create_urllib3_context +from tidy3d.config import config + from . 
import core_config from .constants import ( HEADER_APIKEY, @@ -22,7 +22,6 @@ HEADER_SOURCE_VALUE, HEADER_USER_AGENT, HEADER_VERSION, - KEY_APIKEY, SIMCLOUD_APIKEY, ) from .core_config import get_logger @@ -31,14 +30,6 @@ REINITIALIZED = False -TIDY3D_DIR = f"{expanduser('~')}" -if os.access(TIDY3D_DIR, os.W_OK): - TIDY3D_DIR = f"{expanduser('~')}/.tidy3d" -else: - TIDY3D_DIR = "/tmp/.tidy3d" -CONFIG_FILE = TIDY3D_DIR + "/config" -CREDENTIAL_FILE = TIDY3D_DIR + "/auth.json" - class ResponseCodes(Enum): """HTTP response codes to handle individually.""" @@ -63,12 +54,17 @@ def api_key() -> None: if os.environ.get(SIMCLOUD_APIKEY): return os.environ.get(SIMCLOUD_APIKEY) - if os.path.exists(CONFIG_FILE): - with open(CONFIG_FILE, encoding="utf-8") as config_file: - config = toml.loads(config_file.read()) - return config.get(KEY_APIKEY, "") - return None + try: + apikey = config.web.apikey + except AttributeError: + return None + + if apikey is None: + return None + if hasattr(apikey, "get_secret_value"): + return apikey.get_secret_value() + return str(apikey) def api_key_auth(request: requests.request) -> requests.request: