Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
24d1728
Refactor action to use Node.js entrypoint
JR-1991 Sep 24, 2025
e9bfc60
Add Dataverse GitHub Action entry and cleanup scripts
JR-1991 Sep 24, 2025
32baf22
Add ESLint config and GitHub Actions lint workflow
JR-1991 Sep 24, 2025
e95db99
Fix labelKey quoting in Docker inspect command
JR-1991 Sep 24, 2025
7276e77
Add node_modules to .gitignore
JR-1991 Sep 24, 2025
67f2889
Add build
JR-1991 Sep 24, 2025
2c73dba
Add Rollup and plugins for build process
JR-1991 Sep 24, 2025
dcee5ff
Update input/output tables and add build instructions
JR-1991 Sep 24, 2025
5c3d797
Update action entry points to dist directory
JR-1991 Sep 24, 2025
de46c3f
Add initial Rollup configuration
JR-1991 Sep 24, 2025
2b6a42e
Build as ESModule
JR-1991 Sep 24, 2025
5c70b24
Update ESLint config to use ES module source type
JR-1991 Sep 24, 2025
10741f7
Add instructions for pre-commit hook setup
JR-1991 Sep 24, 2025
1a9ad99
Refactor to use ES modules in index.js and post.js
JR-1991 Sep 24, 2025
6ccd542
Fix docker-compose path resolution in index.js and post.js
JR-1991 Sep 24, 2025
c0e4c13
Refactor root directory path variable usage
JR-1991 Sep 24, 2025
09fbfd9
Update .gitignore for Python cache files
JR-1991 Sep 24, 2025
efd88d6
Move source files to src directory
JR-1991 Sep 24, 2025
1aec65c
Remove version field from docker-compose.yml
JR-1991 Sep 24, 2025
b9d7a64
Move source files to src directory
JR-1991 Sep 24, 2025
273ab07
Add Husky pre-commit hook for automatic build
JR-1991 Sep 24, 2025
ea447fd
Squashed commit of the following:
JR-1991 Oct 8, 2025
15ef2f3
Update README to clarify preset contribution
JR-1991 Oct 8, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions .github/workflows/lint.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Lint workflow: runs ESLint over src/ on every push and pull request.
name: Lint

on:
  push:
  pull_request:

jobs:
  lint:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          # NOTE(review): Node 23 is a non-LTS release — confirm this is intentional
          # (an even-numbered LTS line is the usual choice for CI).
          node-version: '23'
          # Cache the npm download cache keyed on package-lock.json.
          cache: 'npm'

      # --include=dev ensures ESLint (a devDependency) is installed even if
      # NODE_ENV=production is set in the environment.
      - name: Install dependencies
        run: npm ci --include=dev

      - name: Run ESLint
        run: npx eslint src/
81 changes: 81 additions & 0 deletions .github/workflows/scripts/initial-dataset.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
{
"datasetVersion": {
"license": {
"name": "CC0 1.0",
"uri": "http://creativecommons.org/publicdomain/zero/1.0"
},
"metadataBlocks": {
"citation": {
"fields": [
{
"value": "Darwin's Finches",
"typeClass": "primitive",
"multiple": false,
"typeName": "title"
},
{
"value": [
{
"authorName": {
"value": "Finch, Fiona",
"typeClass": "primitive",
"multiple": false,
"typeName": "authorName"
},
"authorAffiliation": {
"value": "Birds Inc.",
"typeClass": "primitive",
"multiple": false,
"typeName": "authorAffiliation"
}
}
],
"typeClass": "compound",
"multiple": true,
"typeName": "author"
},
{
"value": [
{ "datasetContactEmail" : {
"typeClass": "primitive",
"multiple": false,
"typeName": "datasetContactEmail",
"value" : "finch@mailinator.com"
},
"datasetContactName" : {
"typeClass": "primitive",
"multiple": false,
"typeName": "datasetContactName",
"value": "Finch, Fiona"
}
}],
"typeClass": "compound",
"multiple": true,
"typeName": "datasetContact"
},
{
"value": [ {
"dsDescriptionValue":{
"value": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.",
"multiple":false,
"typeClass": "primitive",
"typeName": "dsDescriptionValue"
}}],
"typeClass": "compound",
"multiple": true,
"typeName": "dsDescription"
},
{
"value": [
"Medicine, Health and Life Sciences"
],
"typeClass": "controlledVocabulary",
"multiple": true,
"typeName": "subject"
}
],
"displayName": "Citation Metadata"
}
}
}
}
117 changes: 116 additions & 1 deletion .github/workflows/scripts/test_dataverse.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import json
import os
from urllib.parse import urljoin

import requests


Expand Down Expand Up @@ -94,7 +96,7 @@ def test_create_collection(self):
headers=self.construct_header(),
json={
"name": "TestAction",
"alias": "test_colleczion",
"alias": "test_collection",
"dataverseContacts": [
{"contactEmail": "burrito@burritoplace.com"},
],
Expand All @@ -106,3 +108,116 @@ def test_create_collection(self):

assert response.status_code == 201, response.text
assert response.json()["status"] == "OK"

def test_set_storage_driver(self):
    """
    Test case for setting the storage driver.

    Creates a dedicated collection under the root dataverse, assigns the
    ``LocalStack`` storage driver to it via the admin API, and verifies the
    assignment by reading the driver back.

    Raises:
        AssertionError: if any of the API calls does not succeed or the
            reported driver does not match ``localstack1``.
    """

    # First create a new collection for this test to operate on.
    url = self.construct_url("api/dataverses/root")
    response = requests.post(
        url=url,
        headers=self.construct_header(),
        json={
            "name": "TestStorageDriver",
            "alias": "test_storage_driver",
            "dataverseContacts": [
                {"contactEmail": "burrito@burritoplace.com"},
            ],
            "affiliation": "Burrito Research University",
            "description": "We do all the (burrito) science.",
            "dataverseType": "LABORATORY",
        },
    )

    # Fail fast here: the original code ignored this response, which made a
    # failed collection creation surface later as a confusing 404 on the
    # storageDriver endpoint.
    assert response.status_code == 201, response.text

    # Next, set the storage driver of the new collection to LocalStack.
    url = self.construct_url(
        "api/admin/dataverse/test_storage_driver/storageDriver"
    )

    response = requests.put(
        url,
        headers=self.construct_header(),
        data="LocalStack",
    )

    assert response.status_code == 200, response.text
    assert response.json()["status"] == "OK"

    # Finally, read the driver back and check the reported backing store id.
    response = requests.get(url, headers=self.construct_header())

    assert response.status_code == 200, response.text
    payload = response.json()  # parse once instead of three times
    assert payload["status"] == "OK"
    assert payload["data"]["message"] == "localstack1"

def test_direct_upload_ticket(self):
    """
    Test case for creating a direct upload ticket.

    Creates a dedicated collection, assigns the ``LocalStack`` storage
    driver, creates a dataset from the JSON fixture, then requests
    multipart upload URLs for a 72428800-byte file and asserts that more
    than one part URL is returned.

    Raises:
        AssertionError: if any API call fails or fewer than two upload
            URLs are returned.
        requests.HTTPError: if the dataset creation request fails.
    """

    # First create a new collection for this test to operate on.
    url = self.construct_url("api/dataverses/root")
    response = requests.post(
        url=url,
        headers=self.construct_header(),
        json={
            "name": "TestDirectUploadTicket",
            "alias": "test_direct_upload_ticket",
            "dataverseContacts": [
                {"contactEmail": "burrito@burritoplace.com"},
            ],
            "affiliation": "Burrito Research University",
            "description": "We do all the (burrito) science.",
            "dataverseType": "LABORATORY",
        },
    )

    # Fail fast here: the original code ignored this response, which made a
    # failed collection creation surface later as a confusing error on the
    # storageDriver endpoint.
    assert response.status_code == 201, response.text

    # Next, set the storage driver to LocalStack so direct (S3) upload
    # tickets can be issued for this collection.
    url = self.construct_url(
        "api/admin/dataverse/test_direct_upload_ticket/storageDriver"
    )

    response = requests.put(
        url,
        headers=self.construct_header(),
        data="LocalStack",
    )

    assert response.status_code == 200, response.text
    assert response.json()["status"] == "OK"

    # Next create a new dataset from the JSON fixture.
    url = self.construct_url("api/dataverses/test_direct_upload_ticket/datasets")
    with open(".github/workflows/scripts/initial-dataset.json", "r") as f:
        initial_dataset = json.load(f)

    response = requests.post(
        url=url,
        headers=self.construct_header(),
        json=initial_dataset,
    )

    response.raise_for_status()
    pid = response.json()["data"]["persistentId"]

    # Next, request the upload URLs. The size (~69 MiB) exceeds the
    # configured minimum part size, so a multipart set is expected.
    url = self.construct_url(
        f"/api/datasets/:persistentId/uploadurls/?persistentId={pid}&size=72428800"
    )

    response = requests.get(url, headers=self.construct_header())

    assert response.status_code == 200, response.text
    payload = response.json()  # parse once instead of four times
    assert payload["status"] == "OK"

    # More than one URL confirms the multipart direct-upload path is used.
    assert "urls" in payload["data"]
    assert len(payload["data"]["urls"]) > 1
13 changes: 6 additions & 7 deletions .github/workflows/test-action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,19 @@ jobs:
env:
PORT: 8080
steps:
- name: "Checkout"
uses: "actions/checkout@v4"
- name: 'Checkout'
uses: 'actions/checkout@v4'
- name: Run Dataverse Action
id: dataverse
uses: ./
with:
postgresql_version: 15
jvm_options: |
dataverse.api.signature-secret=eikaizieb2Oghaex
dataverse.pid.datacite.rest-api-url=https://api.datacite.org
presets: localstack
min_part_size_mb: 10
- name: Setup Python
uses: actions/setup-python@v3
with:
python-version: "3.11"
python-version: '3.11'
- name: Install Python Dependencies
run: |
python -m pip install --upgrade pip
Expand All @@ -32,4 +31,4 @@ jobs:
BASE_URL: ${{ steps.dataverse.outputs.base_url }}
DV_VERSION: ${{ steps.dataverse.outputs.dv_version }}
run: |
python3 -m pytest .github/workflows/scripts/test_dataverse.py
python3 -m pytest -vv .github/workflows/scripts/test_dataverse.py
19 changes: 19 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Unit Tests workflow: runs the npm test suite on every push and pull request.
name: Unit Tests
on: [push, pull_request]

jobs:
  test:
    runs-on: ubuntu-latest
    name: Run Unit Tests
    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          # NOTE(review): Node 23 is a non-LTS release — confirm this is intentional.
          node-version: '23'
          # Cache the npm download cache keyed on package-lock.json.
          cache: 'npm'
      # --include=dev ensures test tooling (devDependencies) is installed even
      # if NODE_ENV=production is set in the environment.
      - name: Install Dependencies
        run: npm ci --include=dev
      - name: Run Tests
        run: npm test
7 changes: 7 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,9 @@
# IDE folders
.idea/

# Node modules
node_modules/

# Python
__pycache__/
.pytest_cache/
38 changes: 38 additions & 0 deletions .husky/pre-commit
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
#!/usr/bin/env sh
# Husky pre-commit hook: rebuilds the bundled action and stages dist/ so the
# committed bundle always matches the source in src/.
# NOTE(review): sourcing _/husky.sh is the husky v8 layout — confirm it matches
# the installed husky version.
. "$(dirname -- "$0")/_/husky.sh"

echo "🔨 Building Dataverse action..."

# Guard: this hook must run from the project root where the build is defined.
if [ ! -f "package.json" ] || [ ! -f "rollup.config.js" ]; then
  echo "❌ Not in the project root directory"
  exit 1
fi

# Install dependencies if node_modules doesn't exist (fresh clone).
if [ ! -d "node_modules" ]; then
  echo "📦 Installing dependencies..."
  npm install
fi

# Build the action and abort the commit on failure. Testing the command
# directly is safer than a separate `$?` check, which breaks silently if any
# command is later inserted between the build and the check.
echo "🏗️ Running build..."
if ! npm run build; then
  echo "❌ Build failed! Commit aborted."
  exit 1
fi

# Sanity check: both bundle entry points must exist after the build.
if [ ! -f "dist/index.js" ] || [ ! -f "dist/post.js" ]; then
  echo "❌ Build output files not found! Commit aborted."
  exit 1
fi

echo "✅ Build successful! Proceeding with commit..."

# Stage the rebuilt bundles so they are included in this commit.
git add dist/
Loading