[IA-5008] Update Hail to 0.2.131 (#489)
* Update Hail to 0.2.131

* Try --to html_embed

* Try and fix pytest in python image

* Log disk space in GHAs

* Try to fix python and bioconductor smoke tests
rtitle authored Jul 5, 2024
1 parent 75a6e60 commit 197c8d0
Showing 11 changed files with 24 additions and 15 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/test-terra-jupyter-aou.yml
@@ -84,7 +84,7 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-aou:smoke-test \
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-aou/tests}/*ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-aou/tests}/*ipynb ; do jupyter nbconvert --to html_embed --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
- name: Upload workflow artifacts
uses: actions/upload-artifact@v2
@@ -122,4 +122,4 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-aou:smoke-test \
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-aou/tests}/*ipynb ; do jupyter nbconvert --to html --execute "${nb}" ; done'
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-aou/tests}/*ipynb ; do jupyter nbconvert --to html_embed --execute "${nb}" ; done'
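The html_embed exporter is meant to produce a single self-contained HTML file (images inlined), which keeps the uploaded CI artifacts portable. A minimal local sketch of the same conversion, assuming the exporter is available in the image's nbconvert install (the notebook path is a placeholder):

    # Execute one notebook and write a self-contained HTML report next to it
    jupyter nbconvert --to html_embed --execute "terra-jupyter-aou/tests/smoke_test.ipynb"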
2 changes: 1 addition & 1 deletion .github/workflows/test-terra-jupyter-bioconductor.yml
@@ -84,7 +84,7 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-bioconductor:smoke-test \
/bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.timeout=300 --execute "${nb}" ; done'
/bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html_embed --ExecutePreprocessor.timeout=300 --execute "${nb}" ; done'
- name: Upload workflow artifacts
uses: actions/upload-artifact@v2
4 changes: 2 additions & 2 deletions .github/workflows/test-terra-jupyter-gatk.yml
@@ -84,7 +84,7 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-gatk:smoke-test \
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-gatk/tests}/*ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-gatk/tests}/*ipynb ; do jupyter nbconvert --to html_embed --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
- name: Upload workflow artifacts
uses: actions/upload-artifact@v2
@@ -122,5 +122,5 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-gatk:smoke-test \
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-gatk/tests}/*ipynb ; do jupyter nbconvert --to html --execute "${nb}" ; done'
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-gatk/tests}/*ipynb ; do jupyter nbconvert --to html_embed --execute "${nb}" ; done'
4 changes: 2 additions & 2 deletions .github/workflows/test-terra-jupyter-hail.yml
@@ -84,7 +84,7 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-hail:smoke-test \
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-hail/tests}/*ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-hail/tests}/*ipynb ; do jupyter nbconvert --to html_embed --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
- name: Upload workflow artifacts
uses: actions/upload-artifact@v2
@@ -122,5 +122,5 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-hail:smoke-test \
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-hail/tests}/*ipynb ; do jupyter nbconvert --to html --execute "${nb}" ; done'
/bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-hail/tests}/*ipynb ; do jupyter nbconvert --to html_embed --execute "${nb}" ; done'
6 changes: 3 additions & 3 deletions .github/workflows/test-terra-jupyter-python.yml
@@ -89,7 +89,7 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-python:smoke-test \
/bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
/bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html_embed --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
- name: Upload workflow artifacts
uses: actions/upload-artifact@v2
@@ -112,7 +112,7 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-python:smoke-test \
/bin/sh -c "pip3 install pytest ; pytest"
/bin/sh -c "pip3 install pytest --user ; pytest"
- name: Test Python code specific to notebooks with nbconvert
# Simply 'Cell -> Run All` these notebooks and expect no errors in the case of a successful run of the test suite.
@@ -132,4 +132,4 @@ jobs:
--workdir=/tests \
--entrypoint="" \
terra-jupyter-python:smoke-test \
/bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html --execute "${nb}" ; done'
/bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html_embed --execute "${nb}" ; done'
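The new pytest step can also be reproduced against a locally built image; a sketch under the assumption that build_smoke_test_image.sh tagged the image terra-jupyter-python:smoke-test, with the volume mount below guessed to match the workflow's omitted flags:

    # Run the Python unit tests inside the smoke-test image
    docker run --rm \
      --volume "$PWD/terra-jupyter-python/tests:/tests" \
      --workdir=/tests \
      --entrypoint="" \
      terra-jupyter-python:smoke-test \
      /bin/sh -c "pip3 install pytest --user ; pytest"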
3 changes: 3 additions & 0 deletions build_smoke_test_image.sh
@@ -40,4 +40,7 @@ build_smoke_test_image() {
popd
}

# Log available disk space prior to building image
df -kh

build_smoke_test_image $1
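If one snapshot of disk usage turns out not to be enough to diagnose runner disk pressure, the same check could bracket the build as well; an optional extension, not part of this commit:

    # Log disk space before and after building the smoke-test image
    df -kh
    build_smoke_test_image "$1"
    df -kh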
4 changes: 2 additions & 2 deletions config/conf.json
@@ -11,7 +11,7 @@
"gcr_image_repo" : "us.gcr.io/broad-dsp-gcr-public",
"doc_bucket" : "gs://terra-docker-image-documentation",
"doc_bucket_no_prefix" : "terra-docker-image-documentation",
"spark_version" : "3.3.0",
"spark_version" : "3.5.0",
"image_data" : [
{
"name" : "terra-jupyter-bioconductor",
@@ -47,7 +47,7 @@
"hail"
]
},
"version" : "1.1.10",
"version" : "1.1.11",
"automated_flags" : {
"generate_docs" : true,
"include_in_ui" : true,
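A quick way to confirm the bump after editing the config, assuming jq is available and the file lives at config/conf.json as in this repo:

    # Should print 3.5.0 after this change
    jq -r '.spark_version' config/conf.json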
4 changes: 3 additions & 1 deletion terra-jupyter-bioconductor/tests/smoke_test.ipynb
@@ -231,7 +231,9 @@
},
"outputs": [],
"source": [
"library(XML)"
"# Error in value[[3L]](cond): Package ‘XML’ version 3.99.0.16.1 cannot be unloaded:\n",
"# Error in unloadNamespace(package) : namespace ‘XML’ is imported by ‘rtracklayer’, ‘restfulr’ so cannot be unloaded\n",
"#library(XML)"
]
},
{
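Given the unload error quoted in the notebook, a lighter-weight check that the XML package is installed, without attaching it, could stand in for the commented-out cell; a sketch assuming Rscript is on PATH in the bioconductor image:

    # Report the installed XML version without calling library(XML)
    Rscript -e 'print(packageVersion("XML"))'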
4 changes: 4 additions & 0 deletions terra-jupyter-hail/CHANGELOG.md
@@ -1,3 +1,7 @@
## 1.1.11 - 2024-07-03
- Update `hail` to `0.2.131`
- See https://hail.is/docs/0.2/change_log.html#version-0-2-131 for details

## 1.1.10 - 2024-04-10
- Update `hail` to `0.2.130`
- See https://hail.is/docs/0.2/change_log.html#version-0-2-130) for details
2 changes: 1 addition & 1 deletion terra-jupyter-hail/Dockerfile
@@ -7,7 +7,7 @@ USER root
ENV PIP_USER=false
ENV PYTHONPATH $PYTHONPATH:/usr/lib/spark/python
ENV PYSPARK_PYTHON=python3
ENV HAIL_VERSION=0.2.130
ENV HAIL_VERSION=0.2.131

RUN find $JUPYTER_HOME/scripts -name '*.sh' -type f | xargs chmod +x \
&& $JUPYTER_HOME/scripts/kernel/kernelspec.sh $JUPYTER_HOME/scripts/kernel /opt/conda/share/jupyter/kernels \
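One way to sanity-check the version bump after the image builds, assuming it is tagged terra-jupyter-hail:smoke-test as in the workflows above:

    # Expect Version: 0.2.131 inside the built image
    docker run --rm --entrypoint="" terra-jupyter-hail:smoke-test \
      /bin/sh -c 'pip3 show hail | grep -i ^version'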
2 changes: 1 addition & 1 deletion terra-jupyter-python/tests/smoke_test.ipynb
@@ -96,7 +96,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install pendulum==2.1.2"
"%pip install pendulum==2.1.2 --user --ignore-installed"
]
},
{
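A follow-up cell (or a quick check inside the image) could confirm that the pinned install actually resolves; a sketch, assuming pendulum exposes __version__:

    # Expect 2.1.2
    python3 -c 'import pendulum; print(pendulum.__version__)'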
