Commit f1b8ab5
Merge pull request #2153 from NNPDF/remove_old_cd

Remove old commondata

scarlehoff authored Sep 18, 2024
2 parents: e01e21e + 2bbff1a

Showing 5,959 changed files with 343,035 additions and 407,734 deletions.
(The diff is too large to render in full; only the first 3,000 changed files were loaded.)
doc/sphinx/source/data/data-config.rst (4 changes: 2 additions & 2 deletions)

@@ -19,7 +19,7 @@ Experimental data storage
The central repository for ``CommonData`` in use by ``nnpdf`` projects is
located in the ``nnpdf`` git repository at

-``nnpdf_data/nnpdf_data/new_commondata``
+``nnpdf_data/nnpdf_data/commondata``

where a separate ``CommonData`` file is stored for each *Dataset* with the
filename format described in :ref:`dataset-naming-convention`.
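For orientation, here is a minimal sketch (not part of this commit) of browsing the relocated directory from Python. It assumes an installed nnpdf_data package exporting path_commondata, as defined in the __init__.py diff further below; the printed paths are illustrative.

from nnpdf_data import path_commondata

# path_commondata now points at .../nnpdf_data/nnpdf_data/commondata
print(path_commondata)
# One entry per Dataset, named per the dataset naming convention
for entry in sorted(path_commondata.iterdir())[:3]:
    print(entry.name)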
@@ -45,7 +45,7 @@
The following lines will check whether a newly added theory can be read by validphys
(change 700 by the id of your newly added theory).

.. code-block:: python

    from nnpdf_data import theory_cards
    from nnpdf_data.theorydbutils import fetch_theory
    theory = fetch_theory(theory_cards, 700)
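The check above can also be run over several ids at once; a hedged sketch (not from the repository docs), assuming fetch_theory raises when a theory card cannot be read, and using hypothetical ids 700 and 701:

from nnpdf_data import theory_cards
from nnpdf_data.theorydbutils import fetch_theory

for theory_id in (700, 701):  # hypothetical ids; substitute your newly added theories
    theory = fetch_theory(theory_cards, theory_id)  # assumed to raise on an unreadable card
    print(f"theory {theory_id}: read OK")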
nnpdf_data/nnpdf_data/__init__.py (24 changes: 10 additions & 14 deletions)

@@ -6,13 +6,18 @@
from ._version import __version__

path_vpdata = pathlib.Path(__file__).parent
-path_commondata = path_vpdata / "new_commondata"
+path_commondata = path_vpdata / "commondata"

# VP should not have access to this file, only to the products
_path_legacy_mapping = path_commondata / "dataset_names.yml"
-legacy_to_new_mapping = yaml.YAML().load(_path_legacy_mapping)
theory_cards = path_vpdata / "theory_cards"

+_legacy_to_new_mapping_raw = yaml.YAML().load(_path_legacy_mapping)
+# Convert strings into a dictionary
+legacy_to_new_mapping = {
+    k: ({"dataset": v} if isinstance(v, str) else v) for k, v in _legacy_to_new_mapping_raw.items()
+}


@lru_cache
def legacy_to_new_map(dataset_name, sys=None):
@@ -22,13 +27,6 @@ def legacy_to_new_map(dataset_name, sys=None):
        return dataset_name, None

    new_name = legacy_to_new_mapping[dataset_name]
-    if isinstance(new_name, str):
-        if sys is not None:
-            raise KeyError(
-                f"I cannot translate the combination of {dataset_name} and sys: {sys}. Please report this."
-            )
-        return new_name, None
-
    variant = new_name.get("variant")
    new_name = new_name["dataset"]
    if sys is not None:
@@ -48,11 +46,9 @@ def new_to_legacy_map(dataset_name, variant_used):
    # we can have 2 old dataset mapped to the same new one

    possible_match = None
-    for old_name, new_name in legacy_to_new_mapping.items():
-        variant = None
-        if not isinstance(new_name, str):
-            variant = new_name.get("variant")
-            new_name = new_name["dataset"]
+    for old_name, new_info in legacy_to_new_mapping.items():
+        new_name = new_info["dataset"]
+        variant = new_info.get("variant")

        if new_name == dataset_name:
            if variant_used == variant:
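Taken together, the __init__.py changes move all string-versus-dict handling to load time, so the deleted isinstance branches in legacy_to_new_map and new_to_legacy_map become unnecessary. A short self-contained sketch (not part of the commit; dataset names are made up) of the normalization:

# Stand-in for the raw content of dataset_names.yml
raw = {
    "LEGACY_PLAIN": "NEW_DATASET",  # plain-string entry
    "LEGACY_VARIANT": {"dataset": "NEW_DATASET", "variant": "legacy"},  # dict entry
}

# The same comprehension as in the diff: every value becomes a dict with a "dataset" key
mapping = {k: ({"dataset": v} if isinstance(v, str) else v) for k, v in raw.items()}

assert mapping["LEGACY_PLAIN"] == {"dataset": "NEW_DATASET"}
assert mapping["LEGACY_VARIANT"].get("variant") == "legacy"
# Downstream code can now read entry["dataset"] and entry.get("variant") unconditionally.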

[The remaining rendered entries are files deleted by this commit (the old commondata); their names and contents were not captured before the page stopped loading.]
