Skip to content

Commit

Permalink
Merge pull request neurodatascience#21 from effigies/tests
Browse files Browse the repository at this point in the history
TEST: Add tox and GitHub actions
  • Loading branch information
mtorabi59 authored Mar 14, 2024
2 parents c0a4ead + a289023 commit b7d0606
Show file tree
Hide file tree
Showing 6 changed files with 209 additions and 6 deletions.
88 changes: 88 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
name: Build and test

# Trigger on pushes to main and tag pushes, PRs targeting main,
# a weekly cron, and manual dispatch from the Actions tab.
on:
  push:
    branches:
      - main
    tags:
      - "*"
  pull_request:
    branches:
      - main
  # Run weekly to avoid missing deprecations during low activity
  schedule:
    - cron: '0 0 * * 1'
  # Allow job to be triggered manually from GitHub interface
  workflow_dispatch:

defaults:
  run:
    shell: bash

# Force tox and pytest to use color
env:
  FORCE_COLOR: true

# Cancel superseded in-flight runs of the same ref to save CI time
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  test:
    # Check each OS, all supported Python, minimum versions and latest releases
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: ['ubuntu-latest']
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
        include:
          # Basic dependencies only
          - os: ubuntu-latest
            python-version: 3.8
            dependencies: 'min'

    env:
      DEPENDS: ${{ matrix.dependencies }}
      # NOTE(review): matrix.architecture is never defined in this matrix, so
      # ARCH always evaluates to an empty string — confirm whether the
      # architecture plumbing can be dropped or a matrix axis is missing.
      ARCH: ${{ !contains(fromJSON('["none", "min"]'), matrix.dependencies) && matrix.architecture }}

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          architecture: ${{ matrix.architecture }}
          allow-prereleases: true
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
      - name: Install tox
        run: |
          python -m pip install --upgrade pip
          python -m pip install tox tox-gh-actions
      - name: Show tox config
        run: tox c
      - name: Run tox
        run: tox -v --exit-and-dump-after 1200

  publish:
    runs-on: ubuntu-latest
    environment: "Package deployment"
    needs: [test]
    permissions:
      # Required for trusted publishing
      id-token: write
    # Only publish when a tag is pushed
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0
      - run: pipx run build
      - uses: pypa/gh-action-pypi-publish@release/v1
9 changes: 5 additions & 4 deletions pydfc/data_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"""

import numpy as np
import hdf5storage
import h5py
import os

from .dfc_utils import intersection, label2network
Expand Down Expand Up @@ -59,7 +59,7 @@ def load_from_array(subj_id2load=None, **params):
returns a dictionary of TIME_SERIES objects
each corresponding to a session
- if the file_name is a .mat file, it will be loaded using hdf5storage
- if the file_name is a .mat file, it will be loaded using h5py
if the file_name is a .npy file, it will be loaded using np.load
- the roi locations should be in the same folder and a .npy file
Expand Down Expand Up @@ -113,7 +113,8 @@ def load_from_array(subj_id2load=None, **params):
# LOAD BOLD Data

if params['file_name'][params['file_name'].find('.'):] == '.mat':
DATA = hdf5storage.loadmat(params['data_root']+subj_fldr+'/'+params['file_name'])
with h5py.File(params['data_root']+subj_fldr+'/'+params['file_name'], 'r') as f:
DATA = {k: np.array(f[k]) for k in f.keys()}
elif params['file_name'][params['file_name'].find('.'):] == '.npy':
DATA = np.load(params['data_root']+subj_fldr+'/'+params['file_name'], allow_pickle='True').item()
time_series = DATA['ROI_data'] # time_series.shape = (time, roi)
Expand Down Expand Up @@ -347,4 +348,4 @@ def load_TS(



####################################################################################################################################
####################################################################################################################################
7 changes: 5 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ classifiers = [
"Operating System :: Microsoft :: Windows", #change later
]
dependencies = [
'hdf5storage',
'h5py',
'hmmlearn',
'ksvd',
'matplotlib',
Expand All @@ -31,4 +31,7 @@ maintainers = [{name = "Mohammad Torabi", email = "mohammad.torabi@mail.mcgill.c
name = "pydfc"
readme = "README.rst"
requires-python = ">=3.8"
keywords = ['python', 'dFC package', 'neuroimaging']
keywords = ['python', 'dFC package', 'neuroimaging']

[project.optional-dependencies]
test = ["pytest"]
Empty file added tests/__init__.py
Empty file.
35 changes: 35 additions & 0 deletions tests/test_data_loader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import pytest

import nibabel as nb
import numpy as np

from pydfc.data_loader import nifti2timeseries


# @pytest.fixture(scope="session")
# def rest_file(tmp_path_factory):
# URL = "https://s3.amazonaws.com/openneuro.org/ds002785/derivatives/fmriprep/sub-0001/func/sub-0001_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz?versionId=UfCs4xtwIEPDgmb32qFbtMokl_jxLUKr"
# tmpdir = tmp_path_factory.mktemp("data")
# file_path = tmpdir / "sub-0001_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
# with httpx.stream("GET", URL) as response:
# with file_path.open("wb") as f:
# for chunk in response.iter_bytes():
# f.write(chunk)
#
# return file_path


@pytest.fixture
def simulated_bold_data(tmp_path):
    """Write a random 10x10x10x100 4D NIfTI image to a temp dir and return its path."""
    out_path = tmp_path / "simulated_bold.nii.gz"
    volume = np.random.rand(10, 10, 10, 100)
    nb.Nifti1Image(volume, np.eye(4)).to_filename(out_path)
    return out_path


def test_load(simulated_bold_data):
    """Smoke-test nifti2timeseries on a synthetic 4D BOLD image.

    The original test performed the call but asserted nothing, so it only
    verified that no exception was raised; assert that the loader actually
    returns an object.
    """
    timeseries = nifti2timeseries(
        nifti_file=str(simulated_bold_data),
        n_rois=100,
        Fs=1 / 0.75,
        subj_id="sub-0001",
    )
    # presumably a pydfc TIME_SERIES instance — add richer invariants
    # (ROI count, sampling rate) once the interface is confirmed.
    assert timeseries is not None
76 changes: 76 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
# This file encodes a lot of our intended support range, as well as some
# details about dependency availability.
#
# The majority of the information is contained in tox.envlist and testenv.deps.
[tox]
requires =
    tox>=4
envlist =
    py3{8,9,10,11,12}
    py38-min
skip_missing_interpreters = true

# Configuration that allows us to split tests across GitHub runners effectively
[gh-actions]
python =
    3.8: py38
    3.9: py39
    3.10: py310
    3.11: py311
    3.12: py312

# Map the DEPENDS env var (set in CI) onto the -min factor
[gh-actions:env]
DEPENDS =
    min: min

[testenv]
description = Pytest with coverage
labels = test
pass_env =
    # getpass.getuser() sources for Windows:
    LOGNAME
    USER
    LNAME
    USERNAME
    # Pass user color preferences through
    PY_COLORS
    FORCE_COLOR
    NO_COLOR
    CLICOLOR
    CLICOLOR_FORCE
extras = test

commands =
    pytest -sv tests

[testenv:spellcheck]
description = Check spelling
labels = check
deps =
    codespell[toml]
skip_install = true
commands =
    codespell . {posargs}

[testenv:build{,-strict}]
labels =
    check
    pre-release
deps =
    build
    twine
skip_install = true
set_env =
    build-strict: PYTHONWARNINGS=error
commands =
    python -m build
    python -m twine check dist/*

[testenv:publish]
depends = build
labels = release
deps =
    twine
skip_install = true
commands =
    python -m twine upload dist/*

0 comments on commit b7d0606

Please sign in to comment.