Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/tox.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.10', '3.11', '3.12']

steps:
- uses: actions/checkout@v3
Expand Down
27 changes: 26 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,36 @@ Since the code is meant to be in kubernetes pods, the code needs to be packaged
Their descriptions can be found in `containers/`.
The root folder assumed to build the containers is the root of this package.

To build the taskmaster container, run:

```
docker build . -f containers/taskmaster.Dockerfile -t taskmaster:latest
```

The command is similar for the filer container:

```
docker build . -f containers/filer.Dockerfile -t filer:latest
```

## Unit testing

Unit testing needs the `tox` package.

You can install the package using `uv`:

```
uv tool install tox
```

To install different python versions using `uv`, you can type:

```
uv python install 3.10 3.11 3.12
```

This software will take care of creating virtual environments and installing dependencies in them before running the actual tests and generating the coverage reports.

```
$ tox
$ uv run tox
```
8 changes: 4 additions & 4 deletions containers/filer.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
# Builder: produce wheels

FROM alpine:3.19 as builder
FROM alpine:3.23 AS builder

RUN apk add --no-cache python3 py3-pip
RUN apk add --no-cache git
RUN python3 -m pip install --upgrade setuptools pip wheel --break-system-packages
RUN python3 -m pip install --upgrade setuptools pip wheel build --break-system-packages

WORKDIR /app/
COPY . .

RUN python3 setup.py bdist_wheel
RUN python3 -m build --wheel && rm -rf dist/*.tar.gz

# Install: copy tesk-core*.whl and install it with dependencies

FROM alpine:3.19
FROM alpine:3.23

RUN apk add --no-cache python3 py3-pip

Expand Down
8 changes: 4 additions & 4 deletions containers/taskmaster.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
# Builder: produce wheels

FROM alpine:3.19 as builder
FROM alpine:3.23 AS builder

RUN apk add --no-cache python3 py3-pip
RUN apk add --no-cache git
RUN python3 -m pip install --upgrade setuptools pip wheel --break-system-packages
RUN python3 -m pip install --upgrade setuptools pip wheel build --break-system-packages

WORKDIR /app/
COPY . .

RUN python3 setup.py bdist_wheel
RUN python3 -m build --wheel && rm -rf dist/*.tar.gz

# Install: copy tesk-core*.whl and install it with dependencies

FROM alpine:3.19
FROM alpine:3.23

RUN apk add --no-cache python3 py3-pip

Expand Down
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
[metadata]
description-file=README.md
description_file=README.md
[aliases]
test=pytest
17 changes: 4 additions & 13 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,11 @@
with codecs.open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESC = f.read()

INSTALL_DEPS = ['kubernetes==9.0.0',
INSTALL_DEPS = ['kubernetes==35.0.0',
'requests>=2.20.0',

# urllib3 constraint
'urllib3>=1.26,<2.0 ; python_version < "3.10"',
'urllib3>=2.0,<3.0 ; python_version >= "3.10"',
'urllib3>=2.6.0,<3.0 ; python_version >= "3.10"',

# boto3 constraint
'boto3<=1.28 ; python_version == "3.8"',
'boto3>=1.28,<2.0 ; python_version >= "3.9"',
]
TEST_DEPS = [ 'pytest',
Expand Down Expand Up @@ -56,10 +52,8 @@

'Intended Audience :: System Administrators',

'License :: OSI Approved :: Apache Software License',

'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7'
'Programming Language :: Python :: >=3.10'
],

# What does your project relate to?
Expand All @@ -74,7 +68,6 @@
'taskmaster = tesk_core.taskmaster:main'
]
},
test_suite='tests',

# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
Expand All @@ -84,9 +77,7 @@

setup_requires=['setuptools_scm'],

tests_require=TEST_DEPS,

python_requires='>=3.5, <4.0',
python_requires='>=3.10, <4.0',

# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
Expand Down
6 changes: 3 additions & 3 deletions src/tesk_core/filer.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import json
import re
import os
import distutils.dir_util
import logging
import netrc
import requests
Expand Down Expand Up @@ -112,7 +111,7 @@ def copyFile(src, dst):
'''

# If there is any * in 'dst', use only the dirname (base path)
p = re.compile('.*\*.*')
p = re.compile(r'.*\*.*')
if p.match(dst):
dst=os.path.dirname(dst)

Expand Down Expand Up @@ -229,7 +228,8 @@ def download_file(self):
logging.debug('Downloading ftp file: "%s" Target: %s', self.url,
self.path)
basedir = os.path.dirname(self.path)
distutils.dir_util.mkpath(basedir)
if basedir and not os.path.exists(basedir):
os.makedirs(basedir, exist_ok=True)

return ftp_download_file(self.ftp_connection, self.url_path, self.path)

Expand Down
20 changes: 18 additions & 2 deletions src/tesk_core/filer_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import re
import botocore
import boto3
import mimetypes
from tesk_core.transput import Transput, Type

class S3Transput(Transput):
Expand Down Expand Up @@ -50,10 +51,24 @@ def download_file(self):
os.makedirs(basedir, exist_ok=True)
return self.get_s3_file(self.path, self.file_path)

def get_content_type(self):
# Guess content type based on filename; fallback to binary stream
mime, encoding = mimetypes.guess_type(self.path)
if mime is None:
return 'application/octet-stream'
elif mime.startswith('text/') or mime in ('application/json', 'application/xml', 'application/javascript'):
mime = f'{mime}; charset=utf-8'
return mime


def upload_file(self):
logging.debug('Uploading s3 object: "%s" Target: %s', self.path, self.bucket + "/" + self.file_path)
content_type = self.get_content_type()
logging.debug('Guessed Content-Type: %s for file: %s', content_type, self.path)
try:
self.bucket_obj.upload_file(Filename=self.path, Key=self.file_path)
# Pass ContentType via ExtraArgs so the object is uploaded with the right MIME type
self.bucket_obj.upload_file(Filename=self.path, Key=self.file_path,
ExtraArgs={'ContentType': content_type})
except (botocore.exceptions.ClientError, OSError) as err:
logging.error("File upload failed for '%s'", self.bucket + "/" + self.file_path)
logging.error(err)
Expand Down Expand Up @@ -100,7 +115,8 @@ def download_dir(self):
for obj in objects["Contents"]:
file_name = os.path.basename(obj["Key"])
dir_name = os.path.dirname(obj["Key"])
path_to_create = re.sub(r'^' + self.file_path.strip('/').replace('/', '\/') + '', "", dir_name).strip('/')
prefix = re.escape(self.file_path.strip('/'))
path_to_create = re.sub(r'^' + prefix, '', dir_name).strip('/')
path_to_create = os.path.join(self.path, path_to_create)
os.makedirs(path_to_create, exist_ok=True)
if self.get_s3_file(os.path.join(path_to_create, file_name), obj["Key"]):
Expand Down
17 changes: 8 additions & 9 deletions tests/FilerClassTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,15 +31,15 @@ def test_env_vars(self):

pprint(f.spec)

self.assertEquals(f.getEnv(), [
self.assertEqual(f.getEnv(), [

{ 'name': 'JSON_INPUT' , 'value': '{"a": 1}' }
,{ 'name': 'HOST_BASE_PATH' , 'value': '/home/tfga/workspace/cwl-tes' }
,{ 'name': 'CONTAINER_BASE_PATH' , 'value': '/transfer' }
,{"name": "AWS_CONFIG_FILE", "value": "/aws/config"}
,{"name": "AWS_SHARED_CREDENTIALS_FILE", "value": "/aws/credentials"},
])
self.assertEquals(f.spec['spec']['backoffLimit'], 10)
self.assertEqual(f.spec['spec']['backoffLimit'], 10)


def test_mounts(self):
Expand Down Expand Up @@ -69,15 +69,15 @@ def test_mounts(self):

pprint(f.getVolumeMounts())

self.assertEquals(f.getVolumeMounts(), [
self.assertEqual(f.getVolumeMounts(), [

{ "name" : 'transfer-volume'
, 'mountPath' : path.CONTAINER_BASE_PATH,
},
{'mountPath': '/aws', 'name': 's3-conf', 'readOnly': True}
])

self.assertEquals(f.getVolumes(), [
self.assertEqual(f.getVolumes(), [

{ "name" : 'transfer-volume'
, 'persistentVolumeClaim' : { 'claimName' : 'transfer-pvc' }
Expand Down Expand Up @@ -112,10 +112,10 @@ def test_mounts_file_disabled(self):

pprint(f.getVolumeMounts())

self.assertEquals(f.getVolumeMounts() , [
self.assertEqual(f.getVolumeMounts() , [
{'mountPath': '/aws', 'name': 's3-conf', 'readOnly': True}
])
self.assertEquals(f.getVolumes() , [
self.assertEqual(f.getVolumes() , [
{
"name": "s3-conf",
"secret": {
Expand All @@ -139,11 +139,10 @@ def test_mounts_file_disabled(self):
def test_image_pull_policy(self):

f = Filer('name', {'a': 1})
self.assertEquals(f.getImagePullPolicy() , 'IfNotPresent')
self.assertEqual(f.getImagePullPolicy() , 'IfNotPresent')

f = Filer('name', {'a': 1}, pullPolicyAlways = True)
self.assertEquals(f.getImagePullPolicy() , 'Always')

self.assertEqual(f.getImagePullPolicy() , 'Always')



Expand Down
20 changes: 11 additions & 9 deletions tests/TaskMasterTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,15 @@



def pvcCreateMock(self): print '[mock] Creating PVC...'
def pvcDeleteMock(self): print '[mock] Deleting PVC...'
def pvcCreateMock(self):
print('[mock] Creating PVC...')

def pvcDeleteMock(self):
print('[mock] Deleting PVC...')


def jobRunToCompletionMock(job, b, c):

print "[mock] Creating job '{}'...".format(job.name)

print('[mock] Creating job "{}"...'.format(job.name))
return 'Complete'


Expand All @@ -31,7 +33,7 @@ def test_defaults(self):

print(args)

self.assertEquals( args
self.assertEqual( args
, Namespace( debug=False, file=None, filer_version='v0.1.9', json='json', namespace='default', poll_interval=5, state_file='/tmp/.teskstate'
, localKubeConfig=False
, pull_policy_always=False
Expand All @@ -47,7 +49,7 @@ def test_localKubeConfig(self):

print(args)

self.assertEquals( args
self.assertEqual( args
, Namespace( debug=False, file=None, filer_version='v0.1.9', json='json', namespace='default', poll_interval=5, state_file='/tmp/.teskstate'
, localKubeConfig=True
, pull_policy_always=False
Expand All @@ -59,8 +61,8 @@ def test_pullPolicyAlways(self):

parser = newParser()

self.assertEquals( parser.parse_args(['json' ]).pull_policy_always, False )
self.assertEquals( parser.parse_args(['json', '--pull-policy-always']).pull_policy_always, True )
self.assertEqual( parser.parse_args(['json' ]).pull_policy_always, False )
self.assertEqual( parser.parse_args(['json', '--pull-policy-always']).pull_policy_always, True )



Expand Down
2 changes: 1 addition & 1 deletion tests/test_filer.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ def test_getPath(self):

def test_getPathNoScheme(self):

self.assertEquals( getPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5')
self.assertEqual( getPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5')
, '/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5')

self.assertEqual( containerPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5')
Expand Down
21 changes: 20 additions & 1 deletion tests/test_s3_filer.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,24 @@ def test_s3_upload_file( moto_boto, path, url, ftype, expected,fs, caplog):
assert client.Object('tesk', 'folder/file.txt').load() == None


@pytest.mark.parametrize("filename, url, expected_content", [
("file.txt", "s3://tesk/folder/file.txt", "text/plain; charset=utf-8"),
("file.zip", "s3://tesk/folder/file.zip", "application/zip"),
])
def test_s3_upload_file_content_type(moto_boto, filename, url, expected_content, fs):
"""
Ensure uploaded objects have correct Content-Type metadata based on file extension
"""
fs.create_file(f"/home/user/filer_test/{filename}")
client = boto3.resource('s3', endpoint_url="http://s3.amazonaws.com")
trans = S3Transput(f"/home/user/filer_test/{filename}", url, "FILE")
trans.bucket_obj = client.Bucket(trans.bucket)
assert trans.upload_file() == 0
head = client.meta.client.head_object(Bucket=trans.bucket, Key=trans.file_path)
assert head['ContentType'] == expected_content




@pytest.mark.parametrize("path, url, ftype,expected", [
("tests", "s3://tesk/folder1/folder2","DIRECTORY",0),
Expand All @@ -134,7 +152,8 @@ def test_s3_upload_directory(path, url, ftype, expected, moto_boto, caplog):
otherwise an exception will be raised.
'''
assert client.Object('tesk', 'folder1/folder2/test_filer.py').load() == None

head = client.meta.client.head_object(Bucket=trans.bucket, Key='folder1/folder2/test_filer.py')
assert head['ContentType'].startswith('text/')
def test_upload_directory_for_unknown_file_type(moto_boto, fs, monkeypatch, caplog):
"""
Checking whether an exception is raised when the object type is neither file or directory
Expand Down
Loading