From df627ba91e63c857487e9faa2a26ab548d49728b Mon Sep 17 00:00:00 2001
From: juacrumar
Date: Fri, 15 Dec 2023 09:04:53 +0100
Subject: [PATCH] add test for the python installation

---
 .github/workflows/python_installation.yml    | 44 ++++++++++++++++++++
 n3fit/src/n3fit/tests/test_preprocessing.py  |  2 +-
 validphys2/src/validphys/tests/test_plots.py | 36 ++++++++++------
 3 files changed, 68 insertions(+), 14 deletions(-)
 create mode 100644 .github/workflows/python_installation.yml

diff --git a/.github/workflows/python_installation.yml b/.github/workflows/python_installation.yml
new file mode 100644
index 0000000000..35c7b209e7
--- /dev/null
+++ b/.github/workflows/python_installation.yml
@@ -0,0 +1,44 @@
+name: Test the python installation
+
+on: [push]
+
+jobs:
+  build:
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python-version: ["3.10", "3.11"]
+        include:
+          - os: ubuntu-latest
+            CONDA_OS: linux-64
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    env:
+      NETRC_FILE: ${{ secrets.NETRC_FILE }}
+      NNPDF_SSH_KEY: ${{ secrets.NNPDF_SSH_KEY }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: conda-incubator/setup-miniconda@v3
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Setup conda and install LHAPDF and pandoc
+        shell: bash -l {0}
+        run: |
+          echo "$NETRC_FILE" | base64 --decode > ~/.netrc
+          conda install -n base conda-libmamba-solver
+          conda config --set solver libmamba
+          conda config --append channels conda-forge
+          conda config --prepend channels https://packages.nnpdf.science/public
+          conda config --set show_channel_urls true
+          conda install lhapdf pandoc
+          conda activate test
+      - name: Install nnpdf with testing and qed extras
+        shell: bash -l {0}
+        run: |
+          conda activate test
+          pip install .[qed,tests]
+      - name: Test n3fit and validphys
+        shell: bash -l {0}
+        run: |
+          conda activate test
+          pytest --pyargs --mpl validphys n3fit
diff --git a/n3fit/src/n3fit/tests/test_preprocessing.py b/n3fit/src/n3fit/tests/test_preprocessing.py
index 3bf6a8966c..42b020bdd4 100644
--- a/n3fit/src/n3fit/tests/test_preprocessing.py
+++ b/n3fit/src/n3fit/tests/test_preprocessing.py
@@ -64,4 +64,4 @@ def test_preprocessing():
         ]
     ]
     prefactors = prepro(test_x)
-    np.testing.assert_allclose(test_prefactors, prefactors)
+    np.testing.assert_allclose(test_prefactors, prefactors, rtol=1e-6)
diff --git a/validphys2/src/validphys/tests/test_plots.py b/validphys2/src/validphys/tests/test_plots.py
index da85cc5cd5..1e03f5b06a 100644
--- a/validphys2/src/validphys/tests/test_plots.py
+++ b/validphys2/src/validphys/tests/test_plots.py
@@ -1,5 +1,6 @@
 import matplotlib
-#This is to fix a weird bug in LHAPDF
+
+# This is to fix a weird bug in LHAPDF
 matplotlib.use('agg')
 
 import pytest
@@ -7,52 +8,60 @@
 from validphys.api import API
 from validphys.tests.conftest import PDF, THEORYID, DATA
 
+TOLERANCE_VALUE = 12
+
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_plotpdfs():
     pdfs = [PDF]
     Q = 10
     flavours = ['g']
-    #plot_pdfs returns a generator with (figure, name_hint)
+    # plot_pdfs returns a generator with (figure, name_hint)
     return next(API.plot_pdfs(pdfs=pdfs, Q=Q, flavours=flavours))[0]
 
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_dataspecschi2():
     dsinpts = [
         {'dataset': 'NMC'},
-        {'dataset': 'ATLASTTBARTOT', 'cfac':['QCD']},
-        {'dataset': 'CMSZDIFF12', 'cfac':('QCD', 'NRM'), 'sys':10}
+        {'dataset': 'ATLASTTBARTOT', 'cfac': ['QCD']},
+        {'dataset': 'CMSZDIFF12', 'cfac': ('QCD', 'NRM'), 'sys': 10},
     ]
     dataspecs = [
         {'pdf': PDF, 'theoryid': THEORYID, 'speclabel': 'no t0'},
-        {'pdf': PDF, 'theoryid': THEORYID, 'use_t0': False, 'speclabel': 'with t0'}
+        {'pdf': PDF, 'theoryid': THEORYID, 'use_t0': False, 'speclabel': 'with t0'},
     ]
     return API.plot_dataspecs_datasets_chi2(
         dataset_inputs=dsinpts,
         dataspecs=dataspecs,
         use_cuts='internal',
-        metadata_group='experiment'
+        metadata_group='experiment',
     )
 
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_plot_smpdf(single_data_internal_cuts_config):
     return next(API.plot_smpdf(**single_data_internal_cuts_config))
 
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_plot_smpdf_categorical(single_data_categorical_internal_cuts_config):
     return next(API.plot_smpdf(**single_data_categorical_internal_cuts_config))
 
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_plot_obscorrs(single_data_internal_cuts_config):
     corrpair = [{"corrpair": (i["dataset"],)} for i in DATA[:2]]
     return API.plot_obscorrs(**single_data_internal_cuts_config, corrpair=corrpair)
 
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_plot_xq2():
     theoryid = THEORYID
     use_cuts = "nocuts"
@@ -74,8 +83,9 @@ def test_plot_xq2():
         metadata_group=metadata_group,
     )
 
+
 @pytest.mark.linux
-@pytest.mark.mpl_image_compare
+@pytest.mark.mpl_image_compare(tolerance=TOLERANCE_VALUE)
 def test_plot_xq2_custom():
     theoryid = THEORYID
     use_cuts = "nocuts"