diff --git a/.azure-pipelines/azure-pipelines-linux.yml b/.azure-pipelines/azure-pipelines-linux.yml index a20f315..875d996 100755 --- a/.azure-pipelines/azure-pipelines-linux.yml +++ b/.azure-pipelines/azure-pipelines-linux.yml @@ -13,6 +13,7 @@ jobs: UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 timeoutInMinutes: 360 + variables: {} steps: # configure qemu binfmt-misc running. This allows us to run docker containers @@ -25,6 +26,9 @@ jobs: - script: | export CI=azure + export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) + export remote_url=$(Build.Repository.Uri) + export sha=$(Build.SourceVersion) export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then diff --git a/.azure-pipelines/azure-pipelines-osx.yml b/.azure-pipelines/azure-pipelines-osx.yml index 9e4e167..a8ca2b4 100755 --- a/.azure-pipelines/azure-pipelines-osx.yml +++ b/.azure-pipelines/azure-pipelines-osx.yml @@ -5,18 +5,22 @@ jobs: - job: osx pool: - vmImage: macOS-11 + vmImage: macOS-12 strategy: matrix: osx_64_: CONFIG: osx_64_ UPLOAD_PACKAGES: 'True' timeoutInMinutes: 360 + variables: {} steps: # TODO: Fast finish on azure pipelines? - script: | export CI=azure + export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) + export remote_url=$(Build.Repository.Uri) + export sha=$(Build.SourceVersion) export OSX_FORCE_SDK_DOWNLOAD="1" export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) diff --git a/.azure-pipelines/azure-pipelines-win.yml b/.azure-pipelines/azure-pipelines-win.yml index 75da5df..204327b 100755 --- a/.azure-pipelines/azure-pipelines-win.yml +++ b/.azure-pipelines/azure-pipelines-win.yml @@ -17,13 +17,14 @@ jobs: UPLOAD_TEMP: D:\\tmp steps: + - task: PythonScript@0 displayName: 'Download Miniforge' inputs: scriptSource: inline script: | import urllib.request - url = 'https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Windows-x86_64.exe' + url = 'https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Windows-x86_64.exe' path = r"$(Build.ArtifactStagingDirectory)/Miniforge.exe" urllib.request.urlretrieve(url, path) @@ -35,52 +36,17 @@ jobs: displayName: Add conda to PATH - script: | - call activate base - mamba.exe install "python=3.10" conda-build conda pip boa conda-forge-ci-setup=3 -c conda-forge --strict-channel-priority --yes - displayName: Install conda-build - - - script: set PYTHONUNBUFFERED=1 - displayName: Set PYTHONUNBUFFERED - - # Configure the VM - - script: | - call activate base - setup_conda_rc .\ ".\recipe" .\.ci_support\%CONFIG%.yaml - displayName: conda-forge CI setup - - # Configure the VM. 
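The flow_run_id, remote_url and sha exports added above are provenance values; the later script hunks forward them to conda-build via --extra-meta so they end up in each built package's about.json. A minimal local sketch of the same idea, assuming a checked-out feedstock with conda-build on PATH (the variable names mirror the CI ones; the variant config file is illustrative):

    # Derive the same provenance values from a plain git checkout.
    export remote_url="$(git remote get-url origin)"
    export sha="$(git rev-parse HEAD)"
    export flow_run_id="local_$(date +%s)"

    # conda-build records --extra-meta key=value pairs in the package's about.json.
    conda-build ./recipe -m ./.ci_support/linux_64_.yaml \
        --suppress-variables \
        --extra-meta flow_run_id="${flow_run_id}" remote_url="${remote_url}" sha="${sha}"

On Azure the values instead come from the predefined pipeline variables Build.BuildNumber, Build.Repository.Uri and Build.SourceVersion, as shown in the YAML above.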
- - script: | - set "CI=azure" - call activate base - run_conda_forge_build_setup - displayName: conda-forge build setup - - - script: | - call activate base - if EXIST LICENSE.txt ( - copy LICENSE.txt "recipe\\recipe-scripts-license.txt" - ) - conda.exe mambabuild "recipe" -m .ci_support\%CONFIG%.yaml --suppress-variables %EXTRA_CB_OPTIONS% - displayName: Build recipe + call ".scripts\run_win_build.bat" + displayName: Run Windows build env: PYTHONUNBUFFERED: 1 - - script: | - set "FEEDSTOCK_NAME=%BUILD_REPOSITORY_NAME:*/=%" - call activate base - validate_recipe_outputs "%FEEDSTOCK_NAME%" - displayName: Validate Recipe Outputs - - - script: | - set "GIT_BRANCH=%BUILD_SOURCEBRANCHNAME%" - set "FEEDSTOCK_NAME=%BUILD_REPOSITORY_NAME:*/=%" - set "TEMP=$(UPLOAD_TEMP)" - if not exist "%TEMP%\" md "%TEMP%" - set "TMP=%TEMP%" - call activate base - upload_package --validate --feedstock-name="%FEEDSTOCK_NAME%" .\ ".\recipe" .ci_support\%CONFIG%.yaml - displayName: Upload package - env: + CONFIG: $(CONFIG) + CI: azure + flow_run_id: azure_$(Build.BuildNumber).$(System.JobAttempt) + remote_url: $(Build.Repository.Uri) + sha: $(Build.SourceVersion) + UPLOAD_PACKAGES: $(UPLOAD_PACKAGES) + UPLOAD_TEMP: $(UPLOAD_TEMP) BINSTAR_TOKEN: $(BINSTAR_TOKEN) FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) - condition: and(succeeded(), not(eq(variables['UPLOAD_PACKAGES'], 'False')), not(eq(variables['Build.Reason'], 'PullRequest'))) \ No newline at end of file + STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.ci_support/linux_64_.yaml b/.ci_support/linux_64_.yaml index 4fec014..b44dfa6 100644 --- a/.ci_support/linux_64_.yaml +++ b/.ci_support/linux_64_.yaml @@ -1,5 +1,5 @@ cdt_name: -- cos6 +- cos7 channel_sources: - conda-forge channel_targets: diff --git a/.ci_support/osx_64_.yaml b/.ci_support/osx_64_.yaml index ff3b911..322c73b 100644 --- a/.ci_support/osx_64_.yaml +++ b/.ci_support/osx_64_.yaml @@ -1,5 +1,7 @@ MACOSX_DEPLOYMENT_TARGET: -- '10.9' +- '10.13' +MACOSX_SDK_VERSION: +- '10.13' channel_sources: - conda-forge channel_targets: diff --git a/.gitattributes b/.gitattributes index 7f32763..18f114a 100644 --- a/.gitattributes +++ b/.gitattributes @@ -20,8 +20,8 @@ bld.bat text eol=crlf .travis.yml linguist-generated=true .scripts/* linguist-generated=true .woodpecker.yml linguist-generated=true -LICENSE.txt linguist-generated=true -README.md linguist-generated=true +/LICENSE.txt linguist-generated=true +/README.md linguist-generated=true azure-pipelines.yml linguist-generated=true build-locally.py linguist-generated=true shippable.yml linguist-generated=true diff --git a/.gitignore b/.gitignore index c89ecb7..179afe5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,24 @@ -*.pyc +# User content belongs under recipe/. +# Feedstock configuration goes in `conda-forge.yml` +# Everything else is managed by the conda-smithy rerender process. +# Please do not modify + +# Ignore all files and folders in root +* +!/conda-forge.yml + +# Don't ignore any files/folders if the parent folder is 'un-ignored' +# This also avoids warnings when adding an already-checked file with an ignored parent. +!/**/ +# Don't ignore any files/folders recursively in the following folders +!/recipe/** +!/.ci_support/** -build_artifacts +# Since we ignore files/folders recursively, any folders inside +# build_artifacts gets ignored which trips some build systems. 
+# To avoid that we 'un-ignore' all files/folders recursively +# and only ignore the root build_artifacts folder. +!/build_artifacts/** +/build_artifacts + +*.pyc diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh index 595f8b5..6c805a9 100755 --- a/.scripts/build_steps.sh +++ b/.scripts/build_steps.sh @@ -28,14 +28,15 @@ conda-build: pkgs_dirs: - ${FEEDSTOCK_ROOT}/build_artifacts/pkg_cache - /opt/conda/pkgs +solver: libmamba CONDARC +export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 - -mamba install --update-specs --yes --quiet --channel conda-forge \ - conda-build pip boa conda-forge-ci-setup=3 -mamba update --update-specs --yes --quiet --channel conda-forge \ - conda-build pip boa conda-forge-ci-setup=3 +mamba install --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ + pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" +mamba update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ + pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" # set up the condarc setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" @@ -64,9 +65,16 @@ if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then # Drop into an interactive shell /bin/bash else - conda mambabuild "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ + conda-build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ --suppress-variables ${EXTRA_CB_OPTIONS:-} \ - --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" + --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" \ + --extra-meta flow_run_id="${flow_run_id:-}" remote_url="${remote_url:-}" sha="${sha:-}" + ( startgroup "Inspecting artifacts" ) 2> /dev/null + + # inspect_artifacts was only added in conda-forge-ci-setup 4.6.0 + command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts || echo "inspect_artifacts needs conda-forge-ci-setup >=4.6.0" + + ( endgroup "Inspecting artifacts" ) 2> /dev/null ( startgroup "Validating outputs" ) 2> /dev/null validate_recipe_outputs "${FEEDSTOCK_NAME}" diff --git a/.scripts/logging_utils.sh b/.scripts/logging_utils.sh index 57bc95c..aff009f 100644 --- a/.scripts/logging_utils.sh +++ b/.scripts/logging_utils.sh @@ -12,7 +12,7 @@ function startgroup { echo "##[group]$1";; travis ) echo "$1" - echo -en 'travis_fold:start:'"${1// /}"'\\r';; + echo -en 'travis_fold:start:'"${1// /}"'\r';; github_actions ) echo "::group::$1";; * ) @@ -28,7 +28,7 @@ function endgroup { azure ) echo "##[endgroup]";; travis ) - echo -en 'travis_fold:end:'"${1// /}"'\\r';; + echo -en 'travis_fold:end:'"${1// /}"'\r';; github_actions ) echo "::endgroup::";; esac diff --git a/.scripts/run_docker_build.sh b/.scripts/run_docker_build.sh index 9236239..00f377a 100755 --- a/.scripts/run_docker_build.sh +++ b/.scripts/run_docker_build.sh @@ -21,6 +21,12 @@ if [ -z ${FEEDSTOCK_NAME} ]; then export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT}) fi +if [[ "${sha:-}" == "" ]]; then + pushd "${FEEDSTOCK_ROOT}" + sha=$(git rev-parse HEAD) + popd +fi + docker info # In order for the conda-build process in the container to write to the mounted @@ -91,6 +97,9 @@ docker run ${DOCKER_RUN_ARGS} \ -e CPU_COUNT \ -e BUILD_WITH_CONDA_DEBUG \ -e BUILD_OUTPUT_ID \ + -e flow_run_id \ + -e remote_url \ + -e sha \ -e BINSTAR_TOKEN \ -e FEEDSTOCK_TOKEN \ -e STAGING_BINSTAR_TOKEN \ diff --git a/.scripts/run_osx_build.sh b/.scripts/run_osx_build.sh index 5ef2a19..4ebc38f 100755 --- a/.scripts/run_osx_build.sh +++ b/.scripts/run_osx_build.sh @@ -11,7 +11,7 @@ 
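Both POSIX build scripts now opt into the libmamba solver: build_steps.sh writes solver: libmamba into the condarc it generates, while run_osx_build.sh (next hunk) exports CONDA_SOLVER=libmamba; both also set CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 so that channels recorded in already-installed packages are not fed back into the solve. A quick sketch for checking the active solver locally, assuming conda >= 22.11 where the solver setting exists:

    # Persist the solver choice in a condarc (build_steps.sh writes it into
    # the feedstock-local condarc it generates rather than the user one).
    conda config --set solver libmamba

    # Or set it only for the current shell, as run_osx_build.sh does.
    export CONDA_SOLVER=libmamba
    export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1

    # Show which solver conda will actually use.
    conda config --show solver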
MINIFORGE_HOME=${MINIFORGE_HOME:-${HOME}/miniforge3} ( startgroup "Installing a fresh version of Miniforge" ) 2> /dev/null MINIFORGE_URL="https://github.com/conda-forge/miniforge/releases/latest/download" -MINIFORGE_FILE="Mambaforge-MacOSX-$(uname -m).sh" +MINIFORGE_FILE="Miniforge3-MacOSX-$(uname -m).sh" curl -L -O "${MINIFORGE_URL}/${MINIFORGE_FILE}" rm -rf ${MINIFORGE_HOME} bash $MINIFORGE_FILE -b -p ${MINIFORGE_HOME} @@ -22,11 +22,13 @@ bash $MINIFORGE_FILE -b -p ${MINIFORGE_HOME} source ${MINIFORGE_HOME}/etc/profile.d/conda.sh conda activate base +export CONDA_SOLVER="libmamba" +export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 -mamba install --update-specs --quiet --yes --channel conda-forge \ - conda-build pip boa conda-forge-ci-setup=3 -mamba update --update-specs --yes --quiet --channel conda-forge \ - conda-build pip boa conda-forge-ci-setup=3 +mamba install --update-specs --quiet --yes --channel conda-forge --strict-channel-priority \ + pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" +mamba update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ + pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" @@ -45,6 +47,10 @@ else echo -e "\n\nNot mangling homebrew as we are not running in CI" fi +if [[ "${sha:-}" == "" ]]; then + sha=$(git rev-parse HEAD) +fi + echo -e "\n\nRunning the build setup script." source run_conda_forge_build_setup @@ -71,9 +77,17 @@ if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then /bin/bash else - conda mambabuild ./recipe -m ./.ci_support/${CONFIG}.yaml \ + conda-build ./recipe -m ./.ci_support/${CONFIG}.yaml \ --suppress-variables ${EXTRA_CB_OPTIONS:-} \ - --clobber-file ./.ci_support/clobber_${CONFIG}.yaml + --clobber-file ./.ci_support/clobber_${CONFIG}.yaml \ + --extra-meta flow_run_id="$flow_run_id" remote_url="$remote_url" sha="$sha" + + ( startgroup "Inspecting artifacts" ) 2> /dev/null + + # inspect_artifacts was only added in conda-forge-ci-setup 4.6.0 + command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts || echo "inspect_artifacts needs conda-forge-ci-setup >=4.6.0" + + ( endgroup "Inspecting artifacts" ) 2> /dev/null ( startgroup "Validating outputs" ) 2> /dev/null validate_recipe_outputs "${FEEDSTOCK_NAME}" diff --git a/.scripts/run_win_build.bat b/.scripts/run_win_build.bat new file mode 100755 index 0000000..24ef201 --- /dev/null +++ b/.scripts/run_win_build.bat @@ -0,0 +1,125 @@ +:: PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here +:: will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent +:: changes to this script, consider a proposal to conda-smithy so that other feedstocks can also +:: benefit from the improvement. + +:: Note: we assume a Miniforge installation is available + +:: INPUTS (required environment variables) +:: CONFIG: name of the .ci_support/*.yaml file for this job +:: CI: azure, github_actions, or unset +:: UPLOAD_PACKAGES: true or false +:: UPLOAD_ON_BRANCH: true or false + +setlocal enableextensions enabledelayedexpansion + +call :start_group "Configuring conda" + +:: Activate the base conda environment +call activate base +:: Configure the solver +set "CONDA_SOLVER=libmamba" +if !errorlevel! neq 0 exit /b !errorlevel! 
+set "CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1" + +:: Provision the necessary dependencies to build the recipe later +echo Installing dependencies +mamba.exe install "python=3.10" pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" -c conda-forge --strict-channel-priority --yes +if !errorlevel! neq 0 exit /b !errorlevel! + +:: Set basic configuration +echo Setting up configuration +setup_conda_rc .\ ".\recipe" .\.ci_support\%CONFIG%.yaml +if !errorlevel! neq 0 exit /b !errorlevel! +echo Running build setup +CALL run_conda_forge_build_setup + + +if !errorlevel! neq 0 exit /b !errorlevel! + +if EXIST LICENSE.txt ( + echo Copying feedstock license + copy LICENSE.txt "recipe\\recipe-scripts-license.txt" +) + +if NOT [%flow_run_id%] == [] ( + set "EXTRA_CB_OPTIONS=%EXTRA_CB_OPTIONS% --extra-meta flow_run_id=%flow_run_id% remote_url=%remote_url% sha=%sha%" +) + +call :end_group + +:: Build the recipe +echo Building recipe +conda-build.exe "recipe" -m .ci_support\%CONFIG%.yaml --suppress-variables %EXTRA_CB_OPTIONS% +if !errorlevel! neq 0 exit /b !errorlevel! + +call :start_group "Inspecting artifacts" +:: inspect_artifacts was only added in conda-forge-ci-setup 4.6.0 +WHERE inspect_artifacts >nul 2>nul && inspect_artifacts || echo "inspect_artifacts needs conda-forge-ci-setup >=4.6.0" +call :end_group + +:: Prepare some environment variables for the upload step +if /i "%CI%" == "github_actions" ( + set "FEEDSTOCK_NAME=%GITHUB_REPOSITORY:*/=%" + set "GIT_BRANCH=%GITHUB_REF:refs/heads/=%" + if /i "%GITHUB_EVENT_NAME%" == "pull_request" ( + set "IS_PR_BUILD=True" + ) else ( + set "IS_PR_BUILD=False" + ) + set "TEMP=%RUNNER_TEMP%" +) +if /i "%CI%" == "azure" ( + set "FEEDSTOCK_NAME=%BUILD_REPOSITORY_NAME:*/=%" + set "GIT_BRANCH=%BUILD_SOURCEBRANCHNAME%" + if /i "%BUILD_REASON%" == "PullRequest" ( + set "IS_PR_BUILD=True" + ) else ( + set "IS_PR_BUILD=False" + ) + set "TEMP=%UPLOAD_TEMP%" +) + +:: Validate +call :start_group "Validating outputs" +validate_recipe_outputs "%FEEDSTOCK_NAME%" +if !errorlevel! neq 0 exit /b !errorlevel! +call :end_group + +if /i "%UPLOAD_PACKAGES%" == "true" ( + if /i "%IS_PR_BUILD%" == "false" ( + call :start_group "Uploading packages" + if not exist "%TEMP%\" md "%TEMP%" + set "TMP=%TEMP%" + upload_package --validate --feedstock-name="%FEEDSTOCK_NAME%" .\ ".\recipe" .ci_support\%CONFIG%.yaml + if !errorlevel! neq 0 exit /b !errorlevel! + call :end_group + ) +) + +exit + +:: Logging subroutines + +:start_group +if /i "%CI%" == "github_actions" ( + echo ::group::%~1 + exit /b +) +if /i "%CI%" == "azure" ( + echo ##[group]%~1 + exit /b +) +echo %~1 +exit /b + +:end_group +if /i "%CI%" == "github_actions" ( + echo ::endgroup:: + exit /b +) +if /i "%CI%" == "azure" ( + echo ##[endgroup] + exit /b +) +exit /b \ No newline at end of file diff --git a/README.md b/README.md index c3e454c..c19878e 100644 --- a/README.md +++ b/README.md @@ -103,7 +103,7 @@ available continuous integration services. Thanks to the awesome service provide [CircleCI](https://circleci.com/), [AppVeyor](https://www.appveyor.com/), [Drone](https://cloud.drone.io/welcome), and [TravisCI](https://travis-ci.com/) it is possible to build and upload installable packages to the -[conda-forge](https://anaconda.org/conda-forge) [Anaconda-Cloud](https://anaconda.org/) +[conda-forge](https://anaconda.org/conda-forge) [anaconda.org](https://anaconda.org/) channel for Linux, Windows and OSX respectively. 
To manage the continuous integration and simplify feedstock maintenance diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 6b346f5..e5306da 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -4,5 +4,5 @@ jobs: - template: ./.azure-pipelines/azure-pipelines-linux.yml - - template: ./.azure-pipelines/azure-pipelines-win.yml - - template: ./.azure-pipelines/azure-pipelines-osx.yml \ No newline at end of file + - template: ./.azure-pipelines/azure-pipelines-osx.yml + - template: ./.azure-pipelines/azure-pipelines-win.yml \ No newline at end of file diff --git a/build-locally.py b/build-locally.py index 3f4b7a7..d78427b 100755 --- a/build-locally.py +++ b/build-locally.py @@ -3,11 +3,11 @@ # This file has been generated by conda-smithy in order to build the recipe # locally. # -import os import glob +import os +import platform import subprocess from argparse import ArgumentParser -import platform def setup_environment(ns): @@ -64,8 +64,9 @@ def verify_config(ns): elif ns.config.startswith("osx"): if "OSX_SDK_DIR" not in os.environ: raise RuntimeError( - "Need OSX_SDK_DIR env variable set. Run 'export OSX_SDK_DIR=SDKs' " - "to download the SDK automatically to 'SDKs/MacOSX.sdk'. " + "Need OSX_SDK_DIR env variable set. Run 'export OSX_SDK_DIR=$PWD/SDKs' " + "to download the SDK automatically to '$PWD/SDKs/MacOSX.sdk'. " + "Note: OSX_SDK_DIR must be set to an absolute path. " "Setting this variable implies agreement to the licensing terms of the SDK by Apple." ) diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 11a2460..39e0b4b 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -1,4 +1,6 @@ -{% set version = "0.9" %} +#{% set siriusVersion = "6.0.4" %} +#{% set apiVersion = "2.1" %} +{% set version = "2.1+sirius6.0.4" %} {% set name = "r-sirius-ms" %} package: @@ -6,12 +8,12 @@ package: version: {{ version }} source: - url: https://github.com/boecker-lab/sirius-client-openAPI/archive/refs/tags/{{ version }}.zip - sha256: c26bbeb8b0b1c8a7894ab07254b5e0d7b681dbd1412b9ed7ff200c16ba3f2b0c + url: https://github.com/sirius-ms/sirius-client-openAPI/archive/refs/tags/{{ version }}.zip + sha256: 046a99355a1932775f6863279146ad77408d9e91cf52d1d243bc3581f5085e16 build: noarch: generic - number: 2 + number: 0 merge_build_host: True # [win] rpaths: - lib/R/lib/ @@ -22,20 +24,22 @@ requirements: - m2-zip # [win] - cross-r-base {{ r_base }} # [build_platform != target_platform] host: - - r-base >=3.5.0 + - r-base >=4.0.0 - r-jsonlite - r-httr2 - r-r6 - r-base64enc - r-stringr + - r-processx run: - - r-base >=3.5.0 + - r-base >=4.0.0 - r-jsonlite - r-httr2 - r-r6 - r-base64enc - r-stringr - - sirius-ms =5.7.0 + - r-processx + - sirius-ms =6.0.4 test: files: diff --git a/recipe/run_test.bat b/recipe/run_test.bat index 8c516a5..283e74e 100644 --- a/recipe/run_test.bat +++ b/recipe/run_test.bat @@ -19,7 +19,7 @@ ECHO "### [EXE] RUN ILP SOLVER TEST" if errorlevel 1 exit 1 ECHO "### [EXE] CHECK ILP SOLVER TEST" -If not exist "%cd%\test_fragtree.txt" ( +If not exist "%cd%\test_fragtree.json" ( echo "Framgentation tree test [EXE] failed!" exit 1 ) diff --git a/recipe/run_test.sh b/recipe/run_test.sh index 64e3740..2a16646 100644 --- a/recipe/run_test.sh +++ b/recipe/run_test.sh @@ -21,7 +21,7 @@ echo "### [SIRIUS API] Run Sirius test script" $R -e "source('$RECIPE_DIR/test_script.R')" echo "### [SIRIUS] Check SIRIUS test script results" -if [ ! -f "test_fragtree.txt" ]; then +if [ ! -f "test_fragtree.json" ]; then echo "Framgentation tree test failed!" 
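The clarified error message in build-locally.py (OSX_SDK_DIR must be an absolute path) reflects that the build steps run from different working directories, where a relative SDK path would presumably no longer resolve. A hedged usage sketch for building one of the .ci_support configs locally (the positional config argument is an assumption about the generated script's CLI; Linux configs additionally require Docker):

    # macOS: the SDK is downloaded to $OSX_SDK_DIR/MacOSX.sdk on first use.
    export OSX_SDK_DIR="$PWD/SDKs"
    python build-locally.py osx_64_

    # Linux: runs the Docker-based build for the chosen variant.
    python build-locally.py linux_64_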
exit 1 fi diff --git a/recipe/test_script.R b/recipe/test_script.R index 6dad148..f379802 100644 --- a/recipe/test_script.R +++ b/recipe/test_script.R @@ -3,21 +3,36 @@ options(error = traceback) library('Rsirius') sdk <- SiriusSDK$new() -wrapper <- sdk$start() +sirius_api <- sdk$attach_or_start_sirius() -wait_for_job <- function(pid, job) { - while (!(wrapper$computations_api$GetJob(pid, job$id)$progress$state == "DONE")) { +wait_for_job <- function(project_id, job) { + while (sirius_api$jobs_api$GetJob(project_id, job$id)$progress$state != "DONE") { Sys.sleep(1) } } -pspace <- wrapper$project_spaces_api$GetProjectSpaces()[[1]]$name +project_id <- "test_project" +project_dir <- paste(Sys.getenv('SRC_DIR'), project_id, sep="/") +sirius_api$projects_api$CreateProjectSpace(project_id, project_dir) + data <- file.path(Sys.getenv('SRC_DIR'),"Kaempferol.ms") -wrapper$compounds_api$ImportCompounds(pspace, c(data)) -config <- wrapper$computations_api$GetDefaultJobConfig() -compoundId <- "1_Kaempferol_Kaempferol" -formulaId <- "C15H10O6_[M+H]+" -compute_job <- wrapper$computations_api$StartJob(pspace, config) -wait_for_job(pspace, compute_job) -wrapper$formula_results_api$GetFragTree(pspace, compoundId, formulaId, data_file=file.path(".","test_fragtree.txt")) -sdk$shutdown() +sirius_api$projects_api$ImportPreprocessedData(project_id, input_files=data) + +job_submission <- sirius_api$jobs_api$GetDefaultJobConfig() +job_submission$spectraSearchParams$enabled <- FALSE +job_submission$formulaIdParams$enabled <- TRUE +job_submission$fingerprintPredictionParams$enabled <- FALSE +job_submission$structureDbSearchParams$enabled <- FALSE +job_submission$canopusParams$enabled <- FALSE +job_submission$msNovelistParams$enabled <- FALSE +job <- sirius_api$jobs_api$StartJob(project_id, job_submission) +wait_for_job(project_id, job) + +aligned_feature_id <- sirius_api$features_api$GetAlignedFeatures(project_id)[[1]]$alignedFeatureId +formula_id <- sirius_api$features_api$GetFormulaCandidates(project_id, aligned_feature_id)[[1]]$formulaId +tree <- sirius_api$features_api$GetFragTree(project_id, aligned_feature_id, formula_id) +write(tree$toJSONString(), "test_fragtree.json") + +sirius_api$projects_api$CloseProjectSpace(project_id) +unlink(project_dir, recursive=TRUE) +sdk$shutdown_sirius() \ No newline at end of file
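The rewritten test_script.R drives the SIRIUS 6 API: it creates a project space, imports the preprocessed Kaempferol.ms spectrum, runs a formula-identification-only job (spectral library search, fingerprint prediction, structure search, CANOPUS and MSNovelist are all disabled), writes the fragmentation tree of the first formula candidate to test_fragtree.json, and cleans up. run_test.sh then checks that the JSON file exists. A rough local equivalent of that harness, assuming the Rsirius package and a usable sirius-ms 6.0.4 backend are installed (paths are illustrative; inside conda-build the test runner sets SRC_DIR and RECIPE_DIR itself):

    # Emulate the conda-build test layout: the test input Kaempferol.ms must
    # already be present in $SRC_DIR.
    export SRC_DIR="$PWD"
    export RECIPE_DIR="$PWD/recipe"

    Rscript -e "source(file.path(Sys.getenv('RECIPE_DIR'), 'test_script.R'))"

    # Mirror the check from run_test.sh.
    if [ ! -f test_fragtree.json ]; then
        echo "Fragmentation tree test failed!"
        exit 1
    fi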