From dc4c51eabe309e879c7672d48702d4ef45972e85 Mon Sep 17 00:00:00 2001 From: Lloyd Dakin Date: Fri, 29 Mar 2024 15:44:30 -0700 Subject: [PATCH 1/5] thumbnail service no zscaling --- .../datalab_session/data_operations/median.py | 29 +++++++---- datalab/datalab_session/util.py | 50 +++++++++++++++++-- 2 files changed, 66 insertions(+), 13 deletions(-) diff --git a/datalab/datalab_session/data_operations/median.py b/datalab/datalab_session/data_operations/median.py index 88299e4..50f7103 100644 --- a/datalab/datalab_session/data_operations/median.py +++ b/datalab/datalab_session/data_operations/median.py @@ -5,9 +5,10 @@ import numpy as np from astropy.io import fits +from PIL import Image from datalab.datalab_session.data_operations.data_operation import BaseDataOperation -from datalab.datalab_session.util import store_fits_output, get_archive_from_basename +from datalab.datalab_session.util import add_file_to_bucket, get_archive_from_basename, numpy_to_thumbnails log = logging.getLogger() log.setLevel(logging.INFO) @@ -45,9 +46,16 @@ def wizard_description(): def operate(self): input_files = self.input_data.get('input_files', []) file_count = len(input_files) + cache_key = self.generate_cache_key() + # Operation validation if file_count == 0: + log.warning(f'Tried to execute median operation on {file_count} files') return { 'output_files': [] } + # If cache key is already in S3 we already ran this operation on these inputs + if False: + # TODO: add a bucket check for cache key and then return files + pass log.info(f'Executing median operation on {file_count} files') @@ -63,7 +71,7 @@ def operate(self): except IndexError: continue - with fits.open(fits_url, use_fsspec=True) as hdu_list: + with fits.open(fits_url) as hdu_list: data = hdu_list['SCI'].data memmap_path = os.path.join(temp_dir, f'memmap_{index}.dat') memmap_array = np.memmap(memmap_path, dtype=data.dtype, mode='w+', shape=data.shape) @@ -85,20 +93,23 @@ def operate(self): # Calculate a Median along the z axis median = np.median(stacked_data, axis=2) - cache_key = self.generate_cache_key() + # Create thumbnails + jpgs = numpy_to_thumbnails(median) + + # Create the Fits File header = fits.Header([('KEY', cache_key)]) primary_hdu = fits.PrimaryHDU(header=header) image_hdu = fits.ImageHDU(median) hdu_list = fits.HDUList([primary_hdu, image_hdu]) - fits_buffer = BytesIO() - hdu_list.writeto(fits_buffer) - fits_buffer.seek(0) + # Save Fits and Thumbnails in S3 Buckets + add_file_to_bucket((cache_key + '/' + cache_key + '.fits'), hdu_list, 'FITS') + add_file_to_bucket((cache_key + '/' + cache_key + '-large.jpg'), jpgs['full'], 'JPEG') + add_file_to_bucket((cache_key + '/' + cache_key + '-small.jpg'), jpgs['thumbnail'], 'JPEG') - # Write the HDU List to the output FITS file in the bucket - response = store_fits_output(cache_key, fits_buffer) + # TODO: Get presigned urls for the jpgs and add to output - # TODO: No output yet, need to build a thumbnail service + # TODO: Return presigned urls as output output = {'output_files': []} self.set_percent_completion(file_count / file_count) self.set_output(output) diff --git a/datalab/datalab_session/util.py b/datalab/datalab_session/util.py index de60796..6f7780f 100644 --- a/datalab/datalab_session/util.py +++ b/datalab/datalab_session/util.py @@ -1,25 +1,45 @@ import requests import logging +from io import BytesIO import boto3 +from PIL import Image +import numpy as np +from fits2image.conversions import fits_to_jpg from django.conf import settings log = logging.getLogger() 
 log.setLevel(logging.INFO)
 
-def store_fits_output(item_key: str, fits_buffer: object) -> object:
+def add_file_to_bucket(item_key: str, file: object, file_format: str) -> object:
     """
     Stores a fits into the operation bucket in S3
 
     Keyword Arguments:
     item_key -- name under which to store the fits file
-    fits_buffer -- the fits file to add to the bucket
+    file -- the file to add to the bucket (PIL Image for JPEG, HDUList for FITS)
     """
     log.info(f'Adding {item_key} to {settings.DATALAB_OPERATION_BUCKET}')
 
-    s3 = boto3.resource('s3')
-    response = s3.Bucket(settings.DATALAB_OPERATION_BUCKET).put_object(Key = item_key, Body = fits_buffer.getvalue())
+    buffer = BytesIO()
+
+    if file_format == 'JPEG':
+        file.convert('RGB').save(buffer, format=file_format)
+    elif file_format == 'FITS':
+        file.writeto(buffer)
+    else:
+        log.error(f'Unknown file format {file_format}')
+        raise ValueError(f"add_file_to_bucket can't process {file_format} files")
+
+    buffer.seek(0)
+
+    s3 = boto3.client('s3')
+    response = s3.upload_fileobj(
+        buffer,
+        settings.DATALAB_OPERATION_BUCKET,
+        item_key
+    )
     return response
 
 def get_archive_from_basename(basename: str) -> dict:
@@ -41,3 +61,25 @@ def get_archive_from_basename(basename: str) -> dict:
         raise FileNotFoundError
 
     return results
+
+def numpy_to_thumbnails(img_arr: np.ndarray) -> dict:
+    """
+    Transforms a 2D numpy array into full-res and thumbnail jpg images
+
+    Keyword Arguments:
+    img_arr -- the 2D numpy array to convert
+    """
+    THUMBNAIL_HEIGHT = 256
+    THUMBNAIL_WIDTH = 256
+    try:
+        log.info(f'img_arr dtype: {img_arr.dtype}')
+        log.info(f'img_arr shape: {img_arr.shape}')
+        full_size_img = Image.fromarray(img_arr, mode='F')
+        log.info(full_size_img.mode)
+        full_size_img.show()
+        thumbnail_img = full_size_img.copy()
+        thumbnail_img.thumbnail((THUMBNAIL_WIDTH, THUMBNAIL_HEIGHT))
+        return {'full': full_size_img, 'thumbnail': thumbnail_img}
+    except Exception:
+        log.error(f'Failed to convert array {img_arr.shape} to jpgs')
+        raise OSError
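
For context before patch 2: a mode-'F' (32-bit float) PIL image cannot be written as JPEG directly, and the convert('RGB') call above simply clips raw flux values into 0-255, which is what the subject line's "no zscaling" flags and what patch 2 hands off to fits2image. A minimal standalone sketch of the missing scaling step, assuming a simple percentile stretch rather than whatever fits2image does internally:

    import numpy as np
    from PIL import Image

    def float_array_to_jpeg_ready(img_arr: np.ndarray) -> Image.Image:
        # stretch the 1st-99th percentile range onto 0-255, then drop to 8-bit grayscale
        lo, hi = np.percentile(img_arr, (1, 99))
        scaled = np.clip((img_arr - lo) / (hi - lo), 0, 1) * 255
        return Image.fromarray(scaled.astype(np.uint8), mode='L')

    thumb = float_array_to_jpeg_ready(np.random.rand(512, 512) * 1000)
    thumb.thumbnail((256, 256))  # in-place resize, preserves aspect ratio
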
From 18a97d1a55278eafa918da6b15e3fcc25cfb4aba Mon Sep 17 00:00:00 2001
From: Lloyd Dakin
Date: Tue, 2 Apr 2024 15:29:23 -0700
Subject: [PATCH 2/5] fits2image implementation, operation caching, presigned
 urls

---
 .../datalab_session/data_operations/median.py | 112 +++++--------
 datalab/datalab_session/tasks.py              |  23 ++-
 datalab/datalab_session/util.py               | 157 +++++++++++++-----
 3 files changed, 174 insertions(+), 118 deletions(-)

diff --git a/datalab/datalab_session/data_operations/median.py b/datalab/datalab_session/data_operations/median.py
index 50f7103..99229fa 100644
--- a/datalab/datalab_session/data_operations/median.py
+++ b/datalab/datalab_session/data_operations/median.py
@@ -1,14 +1,11 @@
-from io import BytesIO
 import logging
-import os
 import tempfile
 
 import numpy as np
-from astropy.io import fits
-from PIL import Image
+from fits2image.conversions import fits_to_jpg
 
 from datalab.datalab_session.data_operations.data_operation import BaseDataOperation
-from datalab.datalab_session.util import add_file_to_bucket, get_archive_from_basename, numpy_to_thumbnails
+from datalab.datalab_session.util import add_file_to_bucket, create_fits, stack_arrays, load_image_data_from_fits_urls
 
 log = logging.getLogger()
 log.setLevel(logging.INFO)
@@ -43,73 +40,40 @@ def wizard_description():
             }
         }
 
-    def operate(self):
-        input_files = self.input_data.get('input_files', [])
-        file_count = len(input_files)
-        cache_key = self.generate_cache_key()
-
-        # Operation validation
-        if file_count == 0:
-            log.warning(f'Tried to execute median operation on {file_count} files')
-            return { 'output_files': [] }
-
-        # If cache key is already in S3 we already ran this operation on these inputs
-        if False:
-            # TODO: add a bucket check for cache key and then return files
-            pass
-
-        log.info(f'Executing median operation on {file_count} files')
-
-        with tempfile.TemporaryDirectory() as temp_dir:
-            memmap_paths = []
-
-            for index, file_info in enumerate(input_files):
-                basename = file_info.get('basename', 'No basename found')
-                archive_record = get_archive_from_basename(basename)
-
-                try:
-                    fits_url = archive_record[0].get('url', 'No URL found')
-                except IndexError:
-                    continue
-
-                with fits.open(fits_url) as hdu_list:
-                    data = hdu_list['SCI'].data
-                    memmap_path = os.path.join(temp_dir, f'memmap_{index}.dat')
-                    memmap_array = np.memmap(memmap_path, dtype=data.dtype, mode='w+', shape=data.shape)
-                    memmap_array[:] = data[:]
-                    memmap_paths.append(memmap_path)
-
-                self.set_percent_completion(index / file_count)
-
-            image_data_list = [
-                np.memmap(path, dtype=np.float32, mode='r', shape=memmap_array.shape)
-                for path in memmap_paths
-            ]
-
-            # Crop fits image data to be the same shape then stack
-            min_shape = min(arr.shape for arr in image_data_list)
-            cropped_data_list = [arr[:min_shape[0], :min_shape[1]] for arr in image_data_list]
-            stacked_data = np.stack(cropped_data_list, axis=2)
-
-            # Calculate a Median along the z axis
-            median = np.median(stacked_data, axis=2)
-
-            # Create thumbnails
-            jpgs = numpy_to_thumbnails(median)
-
-            # Create the Fits File
-            header = fits.Header([('KEY', cache_key)])
-            primary_hdu = fits.PrimaryHDU(header=header)
-            image_hdu = fits.ImageHDU(median)
-            hdu_list = fits.HDUList([primary_hdu, image_hdu])
-
-            # Save Fits and Thumbnails in S3 Buckets
-            add_file_to_bucket((cache_key + '/' + cache_key + '.fits'), hdu_list, 'FITS')
-            add_file_to_bucket((cache_key + '/' + cache_key + '-large.jpg'), jpgs['full'], 'JPEG')
-            add_file_to_bucket((cache_key + '/' + cache_key + '-small.jpg'), jpgs['thumbnail'], 'JPEG')
-
-            # TODO: Get presigned urls for the jpgs and add to output
-
-            # TODO: Return presigned urls as output
-            output = {'output_files': []}
-            self.set_percent_completion(file_count / file_count)
+    def operate(self, input_files, cache_key):
+
+        log.info(f'Executing median operation on {len(input_files)} files')
+
+        image_data_list = load_image_data_from_fits_urls(input_files)
+
+        self.set_percent_completion(0.4)
+
+        stacked_data = stack_arrays(image_data_list)
+
+        median = np.median(stacked_data, axis=2)
+
+        hdu_list = create_fits(cache_key, median)
+
+        # Create the output files to be stored in S3
+        fits_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}.fits').name
+        large_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}-large.jpg').name
+        thumbnail_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}-small.jpg').name
+
+        hdu_list.writeto(fits_path)
+        fits_to_jpg(fits_path, large_jpg_path, width=median.shape[1], height=median.shape[0])
+        fits_to_jpg(fits_path, thumbnail_jpg_path)
+
+        self.set_percent_completion(0.7)
+
+        # Save FITS and thumbnails in S3 buckets
+        fits_url = add_file_to_bucket(f'{cache_key}/{cache_key}.fits', fits_path)
+        large_jpg_url = add_file_to_bucket(f'{cache_key}/{cache_key}-large.jpg', large_jpg_path)
+        thumbnail_jpg_url = add_file_to_bucket(f'{cache_key}/{cache_key}-small.jpg', thumbnail_jpg_path)
+
+        self.set_percent_completion(0.9)
+
+        output = {'output_files': [large_jpg_url, thumbnail_jpg_url]}
+
+        log.info(f'Median operation output: {output}')
+        self.set_percent_completion(1)
         self.set_output(output)
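
The crop/stack/median pipeline that operate() now delegates to the util helpers reduces to a few numpy calls; a self-contained toy run (toy arrays standing in for real image data):

    import numpy as np

    # three "images" whose shapes do not quite agree, as downloaded frames often don't
    frames = [np.ones((4, 5)), 2 * np.ones((5, 5)), 3 * np.ones((4, 6))]

    # crop every frame to the elementwise-minimum shape, then stack along a new z axis
    min_shape = np.min([f.shape for f in frames], axis=0)   # -> [4, 5]
    cropped = [f[:min_shape[0], :min_shape[1]] for f in frames]

    stacked = np.stack(cropped, axis=2)    # shape (4, 5, 3)
    median = np.median(stacked, axis=2)    # pixelwise median, shape (4, 5)
    print(median[0, 0])                    # 2.0
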
diff --git a/datalab/datalab_session/tasks.py b/datalab/datalab_session/tasks.py
index 3b8a7fe..d52b6ba 100644
--- a/datalab/datalab_session/tasks.py
+++ b/datalab/datalab_session/tasks.py
@@ -1,6 +1,12 @@
+import logging
+
 import dramatiq
 
 from datalab.datalab_session.data_operations.utils import available_operations
+from datalab.datalab_session.util import get_presigned_url, key_exists
+
+log = logging.getLogger()
+log.setLevel(logging.INFO)
 
 #TODO: Perhaps define a pipeline that can take the output of one data operation and upload to a s3 bucket, indicate success, etc...
 
@@ -10,4 +16,19 @@ def execute_data_operation(data_operation_name: str, input_data: dict):
     if operation_class is None:
         raise NotImplementedError("Operation not implemented!")
     else:
-        operation_class(input_data).operate()
+        operation = operation_class(input_data)
+        cache_key = operation.generate_cache_key()
+
+        # check if we've done this operation already
+        if key_exists(cache_key):
+            log.info(f'Operation {cache_key} cached')
+
+            large_jpg_url = get_presigned_url(f'{cache_key}/{cache_key}-large.jpg')
+            thumbnail_jpg_url = get_presigned_url(f'{cache_key}/{cache_key}-small.jpg')
+
+            output = {'output_files': [large_jpg_url, thumbnail_jpg_url]}
+
+            operation.set_percent_completion(1)
+            operation.set_output(output)
+        else:
+            operation.operate(input_data.get('input_files', []), cache_key)
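
generate_cache_key() itself is not shown in this series, but data_operation.py already imports hashlib and json (visible as context in patch 4). A plausible minimal shape for such a key, given as an assumption for illustration rather than the project's actual implementation:

    import hashlib
    import json

    def make_cache_key(operation_name: str, input_data: dict) -> str:
        # sort_keys canonicalizes the JSON so dict ordering cannot change the hash;
        # the same operation run on the same inputs then maps to the same S3 prefix
        payload = json.dumps({'name': operation_name, 'inputs': input_data}, sort_keys=True)
        return hashlib.sha256(payload.encode('utf-8')).hexdigest()
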
diff --git a/datalab/datalab_session/util.py b/datalab/datalab_session/util.py
index 6f7780f..d481b16 100644
--- a/datalab/datalab_session/util.py
+++ b/datalab/datalab_session/util.py
@@ -1,53 +1,90 @@
 import requests
 import logging
-from io import BytesIO
+import tempfile
+import os
 
 import boto3
-from PIL import Image
+from astropy.io import fits
 import numpy as np
-from fits2image.conversions import fits_to_jpg
 
 from django.conf import settings
 
 log = logging.getLogger()
 log.setLevel(logging.INFO)
 
-def add_file_to_bucket(item_key: str, file: object, file_format: str) -> object:
+def add_file_to_bucket(item_key: str, path: object) -> str:
     """
     Stores a fits into the operation bucket in S3
 
-    Keyword Arguments:
-    item_key -- name under which to store the fits file
-    file -- the file to add to the bucket (PIL Image for JPEG, HDUList for FITS)
+    Args:
+        item_key -- name under which to store the fits file
+        path -- path to the local file to add to the bucket
+
+    Returns:
+        A presigned url for the object just added to the bucket
     """
     log.info(f'Adding {item_key} to {settings.DATALAB_OPERATION_BUCKET}')
 
-    buffer = BytesIO()
-
-    if file_format == 'JPEG':
-        file.convert('RGB').save(buffer, format=file_format)
-    elif file_format == 'FITS':
-        file.writeto(buffer)
-    else:
-        log.error(f'Unknown file format {file_format}')
-        raise ValueError(f"add_file_to_bucket can't process {file_format} files")
-
-    buffer.seek(0)
-
     s3 = boto3.client('s3')
-    response = s3.upload_fileobj(
-        buffer,
+    response = s3.upload_file(
+        path,
         settings.DATALAB_OPERATION_BUCKET,
         item_key
     )
-    return response
+
+    return get_presigned_url(item_key)
+
+def get_presigned_url(key: str) -> str:
+    """
+    Gets a presigned url from the operation bucket using the key
+
+    Args:
+        key -- name to look up in the bucket
+
+    Returns:
+        A presigned url for the object or None
+    """
+    s3 = boto3.client('s3')
+
+    try:
+        url = s3.generate_presigned_url(
+            ClientMethod='get_object',
+            Params={
+                'Bucket': settings.DATALAB_OPERATION_BUCKET,
+                'Key': key
+            },
+            ExpiresIn = 60 * 60 * 24 * 30 # URL will be valid for 30 days
+        )
+    except Exception:
+        log.error(f'Could not generate presigned url for {key}')
+        return None
+
+    return url
+
+def key_exists(key: str) -> bool:
+    """
+    Checks if a given string exists as part of an object key in an S3 bucket.
+
+    Args:
+        key (str): The prefix to look for in the operation bucket's object keys.
+
+    Returns:
+        bool: True if at least one object key starts with the given prefix, False otherwise.
+    """
+    s3 = boto3.client('s3')
+    response = s3.list_objects_v2(Bucket=settings.DATALAB_OPERATION_BUCKET, Prefix=key, MaxKeys=1)
+    return 'Contents' in response
 
 def get_archive_from_basename(basename: str) -> dict:
     """
-    Queries and returns an archive file from the Archive
+    Queries the Archive for fits files matching the basename
 
-    Keyword Arguments:
-    basename -- name to query
+    Args:
+        basename -- name to query
+
+    Returns:
+        list of matching archive records containing the fits urls
     """
     query_params = {'basename_exact': basename }
@@ -62,24 +99,58 @@ def get_archive_from_basename(basename: str) -> dict:
 
     return results
 
-def numpy_to_thumbnails(img_arr: np.ndarray) -> dict:
+def create_fits(key: str, image_arr: np.ndarray) -> fits.HDUList:
+
+    header = fits.Header([('KEY', key)])
+    primary_hdu = fits.PrimaryHDU(header=header)
+    image_hdu = fits.ImageHDU(image_arr)
+
+    hdu_list = fits.HDUList([primary_hdu, image_hdu])
+
+    return hdu_list
+
+def stack_arrays(array_list: list):
     """
-    Transforms a 2D numpy array into full-res and thumbnail jpg images
+    Takes a list of numpy arrays, crops them to an equal shape, and stacks them into a 3d numpy array
 
-    Keyword Arguments:
-    img_arr -- the 2D numpy array to convert
     """
-    THUMBNAIL_HEIGHT = 256
-    THUMBNAIL_WIDTH = 256
-    try:
-        log.info(f'img_arr dtype: {img_arr.dtype}')
-        log.info(f'img_arr shape: {img_arr.shape}')
-        full_size_img = Image.fromarray(img_arr, mode='F')
-        log.info(full_size_img.mode)
-        full_size_img.show()
-        thumbnail_img = full_size_img.copy()
-        thumbnail_img.thumbnail((THUMBNAIL_WIDTH, THUMBNAIL_HEIGHT))
-        return {'full': full_size_img, 'thumbnail': thumbnail_img}
-    except Exception:
-        log.error(f'Failed to convert array {img_arr.shape} to jpgs')
-        raise OSError
+    min_shape = np.min([arr.shape for arr in array_list], axis=0)  # elementwise minimum over all shapes
+    cropped_data_list = [arr[:min_shape[0], :min_shape[1]] for arr in array_list]
+
+    stacked = np.stack(cropped_data_list, axis=2)
+
+    return stacked
+
+def load_image_data_from_fits_urls(input_files: list[dict]) -> list[np.memmap]:
+    """
+    Load image data from FITS URLs and return a list of memory-mapped arrays.
+
+    Args:
+        input_files (list): A list of dictionaries containing file information.
+
+    Returns:
+        list: A list of memory-mapped arrays containing the image data.
+ """ + memmap_paths = [] + + with tempfile.TemporaryDirectory() as temp_dir: + for index, file_info in enumerate(input_files): + basename = file_info.get('basename', 'No basename found') + archive_record = get_archive_from_basename(basename) + + try: + fits_url = archive_record[0].get('url', 'No URL found') + except IndexError: + continue + + with fits.open(fits_url) as hdu_list: + data = hdu_list['SCI'].data + memmap_path = os.path.join(temp_dir, f'memmap_{index}.dat') + memmap_array = np.memmap(memmap_path, dtype=data.dtype, mode='w+', shape=data.shape) + memmap_array[:] = data[:] + memmap_paths.append(memmap_path) + + return [ + np.memmap(path, dtype=np.float32, mode='r', shape=memmap_array.shape) + for path in memmap_paths + ] From aca4b8b16ee6583da62dc6a619f421ef884b6f5c Mon Sep 17 00:00:00 2001 From: Lloyd Dakin Date: Wed, 3 Apr 2024 12:12:49 -0700 Subject: [PATCH 3/5] updating poetry and toml with new libraries astropy, numpy, and fits2image --- poetry.lock | 347 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 3 + 2 files changed, 348 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4b85f86..c9f43c3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -17,10 +18,80 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +[[package]] +name = "astropy" +version = "6.0.1" +description = "Astronomy and astrophysics core library" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "astropy-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2b5ff962b0e586953f95b63ec047e1d7a3b6a12a13d11c6e909e0bcd3e05b445"}, + {file = "astropy-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:129ed1fb1d23e6fbf8b8e697c2e7340d99bc6271b8c59f9572f3f47063a42e6a"}, + {file = "astropy-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e998ee0ffa58342b4d44f2843b036015e3a6326b53185c5361fea4430658466"}, + {file = "astropy-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c33e3d746c3e7a324dbd76b236fe1e44304d5b6d941a1f724f419d01666d6d88"}, + {file = "astropy-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2f53caf9efebcc9040a92c977dcdae78dd0ff4de218fd316e4fcaffd9ace8dc1"}, + {file = "astropy-6.0.1-cp310-cp310-win32.whl", hash = "sha256:242b8f101301ab303366109d0dfe3cf0db745bf778f7b859fb486105197577d1"}, + {file = "astropy-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:1db9e95438472f6ed53fa2f4e2811c2d84f4085eeacc3cb8820d770d1ea61d1c"}, + {file = "astropy-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c682967736228cc4477e63db0e8854375dd31d755de55b30256de98f1f7b7c23"}, + {file = "astropy-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5208b6f10956ca92efb73375364c81a7df365b441b07f4941a24ee0f1bd9e292"}, + {file = "astropy-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f28facb5800c0617f233c1db0e622da83de1f74ca28d0ff8646e360d4fda74e"}, + {file = "astropy-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2c00922548a666b026e2630a563090341d74c8222066e9c84c9673395bca7363"}, + {file = "astropy-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9b3bf27c51fb46bba993695eebd0c39a4e2a792b707e65b28ac4e8ae703f93d4"}, + {file = "astropy-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1f183ab42655ad09b064a4e8eb9cd1eaa138b90ca2f0cd82a200afda062063a5"}, + {file = "astropy-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d934aff5fe81e84a45098e281f969976963cc16b3401176a8171affd84301a27"}, + {file = "astropy-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4fdd54fa57b85d50c4b83ab7ffd90ba2ffcc3d725e3f8d5ffa1ff5f500ef6b97"}, + {file = "astropy-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d1eb40fe68121753f43fc82d618a2eae53dd0731689e124ef9e002aa2c241c4f"}, + {file = "astropy-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bc267738a85f633142c246dceefa722b653e7ba99f02e86dd9a7b980467eafc"}, + {file = "astropy-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e604898ca1790c9fd2e2dc83b38f9185556ea618a3a6e6be31c286fafbebd165"}, + {file = "astropy-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:034dff5994428fb89813f40a18600dd8804128c52edf3d1baa8936eca3738de4"}, + {file = "astropy-6.0.1-cp312-cp312-win32.whl", hash = "sha256:87ebbae7ba52f4de9b9f45029a3167d6515399138048d0b734c9033fda7fd723"}, + {file = "astropy-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fbd6d88935749ae892445691ac0dbd1923fc6d8094753a35150fc7756118fe3"}, + {file = "astropy-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f18536d6f97faa81ed6c9af7bb2e27b376b41b27399f862e3b13387538c966b9"}, + {file = "astropy-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:764992af1ee1cd6d6f26373d09ddb5ede639d025ce9ff658b3b6580dc2ba4ec6"}, + {file = "astropy-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34fd2bb39cbfa6a8815b5cc99008d59057b9d341db00c67dbb40a3784a8dfb08"}, + {file = "astropy-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca9da00bfa95fbf8475d22aba6d7d046f3821a107b733fc7c7c35c74fcfa2bbf"}, + {file = "astropy-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15a5da8a0a84d75b55fafd56630578131c3c9186e4e486b4d2fb15c349b844d0"}, + {file = "astropy-6.0.1-cp39-cp39-win32.whl", hash = "sha256:46cbadf360bbadb6a106217e104b91f85dd618658caffdaab5d54a14d0d52563"}, + {file = "astropy-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:eaff9388a2fed0757bd0b4c41c9346d0edea9e7e938a4bfa8070eaabbb538a23"}, + {file = "astropy-6.0.1.tar.gz", hash = "sha256:89a975de356d0608e74f1f493442fb3acbbb7a85b739e074460bb0340014b39c"}, +] + +[package.dependencies] +astropy-iers-data = ">=0.2024.2.26.0.28.55" +numpy = ">=1.22,<2" +packaging = ">=19.0" +pyerfa = ">=2.0.1.1" +PyYAML = ">=3.13" + +[package.extras] +all = ["asdf-astropy (>=0.3)", "astropy[recommended]", "beautifulsoup4", "bleach", "bottleneck", "certifi", "dask[array]", "fsspec[http] (>=2023.4.0)", "h5py", "html5lib", "ipython (>=4.2)", "jplephem", "mpmath", "pandas", "pre-commit", "pyarrow (>=5.0.0)", "pytest (>=7.0)", "pytz", "s3fs (>=2023.4.0)", "sortedcontainers", "typing-extensions (>=3.10.0.1)"] +docs = ["Jinja2 (>=3.1.3)", "astropy[recommended]", "pytest (>=7.0)", "sphinx", "sphinx-astropy[confv2] (>=1.9.1)", "sphinx-changelog (>=1.2.0)", "sphinx-design", "tomli"] +recommended = ["matplotlib (>=3.3,!=3.4.0,!=3.5.2)", "scipy (>=1.5)"] +test = ["pytest (>=7.0)", "pytest-astropy (>=0.10)", "pytest-astropy-header (>=0.2.1)", 
"pytest-doctestplus (>=0.12)", "pytest-xdist", "threadpoolctl"] +test-all = ["astropy[test]", "coverage[toml]", "ipython (>=4.2)", "objgraph", "sgp4 (>=2.3)", "skyfield (>=1.20)"] + +[[package]] +name = "astropy-iers-data" +version = "0.2024.4.1.0.33.14" +description = "IERS Earth Rotation and Leap Second tables for the astropy core package" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "astropy-iers-data-0.2024.4.1.0.33.14.tar.gz", hash = "sha256:e09dc2c52e26b3fac604f29fb97de28fb2f691d6c7da65ac5fece6b56dc2ac2a"}, + {file = "astropy_iers_data-0.2024.4.1.0.33.14-py3-none-any.whl", hash = "sha256:b70b91dddc36454b385dcb4be8af5728fa869b8a725dc0a06b8757ee2ef617bd"}, +] + +[package.extras] +docs = ["pytest"] +test = ["hypothesis", "pytest", "pytest-remotedata"] + [[package]] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -32,6 +103,7 @@ files = [ name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -43,6 +115,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -107,6 +180,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -206,6 +280,7 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -217,6 +292,7 @@ files = [ name = "django" version = "4.2.10" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -237,6 +313,7 @@ bcrypt = ["bcrypt"] name = "django-cors-headers" version = "4.3.1" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -252,6 +329,7 @@ Django = ">=3.2" name = "django-dramatiq" version = "0.11.6" description = "A Django app for Dramatiq." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -270,6 +348,7 @@ dev = ["bumpversion", "flake8", "flake8-quotes", "isort", "pytest", "pytest-cov" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -284,6 +363,7 @@ Django = ">=3.2" name = "django-filter" version = "23.5" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -298,6 +378,7 @@ Django = ">=3.2" name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -313,6 +394,7 @@ pytz = "*" name = "dramatiq" version = "1.16.0" description = "Background Processing for Python 3." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -336,6 +418,7 @@ watch = ["watchdog", "watchdog-gevent"] name = "drf-nested-routers" version = "0.93.5" description = "Nested resources for the Django Rest Framework" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -351,6 +434,7 @@ djangorestframework = ">=3.14.0" name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -361,10 +445,28 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "fits2image" +version = "0.4.7" +description = "Common libraries for the conversion and scaling of fits images" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "fits2image-0.4.7-py3-none-any.whl", hash = "sha256:0727d0dfef482a1493399f3b44b59085fd0beb1b9d81aa32fd1882272561dae5"}, + {file = "fits2image-0.4.7.tar.gz", hash = "sha256:25a29c0b442c4d025d6bf546c677b7539d3a57cedfc65ee87315c618feece387"}, +] + +[package.dependencies] +astropy = "*" +numpy = "*" +Pillow = "*" + [[package]] name = "hiredis" version = "2.3.2" description = "Python wrapper for hiredis" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -483,6 +585,7 @@ files = [ name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -494,6 +597,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -501,10 +605,57 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = 
"numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "ocs-authentication" version = "0.2.2" description = "Authentication backends and utilities for the OCS applications" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -521,6 +672,7 @@ requests = ">=2,<3" name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -532,6 +684,7 @@ files = [ name = "pika" version = "1.3.2" description = "Pika Python AMQP Client Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -544,10 +697,98 @@ gevent = ["gevent"] tornado = ["tornado"] twisted = ["twisted"] +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = 
"pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + [[package]] name = "pluggy" version = "1.4.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -563,6 +804,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.19.0" description = "Python client for the Prometheus monitoring system." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -577,6 +819,7 @@ twisted = ["twisted"] name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -658,6 +901,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -665,10 +909,39 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pyerfa" +version = "2.0.1.1" +description = "Python bindings for ERFA" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pyerfa-2.0.1.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:1ce322ac30673c2aeb0ee22ced4938c1e9e26db0cbe175912a213aaff42383df"}, + {file = "pyerfa-2.0.1.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:67dfc00dcdea87a9b3c0bb4596fb0cfb54ee9c1c75fdcf19411d1029a18f6eec"}, + {file = "pyerfa-2.0.1.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34ee545780246fb0d1d3f7e46a6daa152be06a26b2d27fbfe309cab9ab488ea7"}, + {file = "pyerfa-2.0.1.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db85db72ab352da6ffc790e41209d8f41feb5b175d682cf1f0e3e60e9e5cdf8"}, + {file = "pyerfa-2.0.1.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c50b7cdb005632931b7b56a679cf25361ed6b3aa7c21e491e65cc89cb337e66a"}, + {file = "pyerfa-2.0.1.1-cp39-abi3-win32.whl", hash = "sha256:30649047b7a8ce19f43e4d18a26b8a44405a6bb406df16c687330a3b937723b2"}, + {file = "pyerfa-2.0.1.1-cp39-abi3-win_amd64.whl", hash = "sha256:94df7566ce5a5abb14e2dd1fe879204390639e9a76383ec27f10598eb24be760"}, + {file = 
"pyerfa-2.0.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0e95cf3d11f76f473bf011980e9ea367ca7e68ca675d8b32499814fb6e387d4c"}, + {file = "pyerfa-2.0.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08b5abb90b34e819c1fca69047a76c0d344cb0c8fe4f7c8773f032d8afd623b4"}, + {file = "pyerfa-2.0.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1c0c1efa701cab986aa58d03c58f77e47ea1898bff2684377d29580a055f836a"}, + {file = "pyerfa-2.0.1.1.tar.gz", hash = "sha256:dbac74ef8d3d3b0f22ef0ad3bbbdb30b2a9e10570b1fa5a98be34c7be36c9a6b"}, +] + +[package.dependencies] +numpy = ">=1.19" + +[package.extras] +docs = ["sphinx-astropy (>=1.3)"] +test = ["pytest", "pytest-doctestplus (>=0.7)"] + [[package]] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -691,6 +964,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -698,10 +972,72 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "rabbitmq" version = "0.2.0" description = "CFFI bindings to librabbitmq 0.8.0" +category = "main" optional = false python-versions = "*" files = [ @@ -715,6 +1051,7 @@ cffi = "*" name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -733,6 +1070,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -754,6 +1092,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
+category = "main" optional = false python-versions = ">=3.5" files = [ @@ -770,6 +1109,7 @@ test = ["pytest", "pytest-cov"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -781,6 +1121,7 @@ files = [ name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -792,6 +1133,7 @@ files = [ name = "tzdata" version = "2023.4" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -803,6 +1145,7 @@ files = [ name = "urllib3" version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -819,4 +1162,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "89b66dbd7aa7e4d601bff62552c08917a803d2cb672898c420b5a9863433ddc3" +content-hash = "ae7148f25a2f32f5bd8c6fcc8b5eab56a85443598f4edca498bef8ebb834bced" diff --git a/pyproject.toml b/pyproject.toml index ea95039..ffaa207 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,9 @@ redis = "^5.0.1" rabbitmq = "^0.2.0" pika = "^1.3.2" hiredis = "^2.3.2" +numpy = "^1.26.4" +astropy = "^6.0.1" +fits2image = "^0.4.7" [tool.poetry.group.dev.dependencies] pytest = "^7.4.3" From d3530c960d40f7dee6e039c92f0583096a615cd9 Mon Sep 17 00:00:00 2001 From: Lloyd Dakin Date: Fri, 5 Apr 2024 15:07:49 -0700 Subject: [PATCH 4/5] custom percent completion, removed redundant caching check, add boto3 to poetry --- .../data_operations/data_operation.py | 68 ++++++++++ .../datalab_session/data_operations/median.py | 29 +--- datalab/datalab_session/tasks.py | 14 +- datalab/datalab_session/util.py | 36 ----- poetry.lock | 125 +++++++++++++++++- pyproject.toml | 1 + 6 files changed, 196 insertions(+), 77 deletions(-) diff --git a/datalab/datalab_session/data_operations/data_operation.py b/datalab/datalab_session/data_operations/data_operation.py index 0f285d3..c9f4256 100644 --- a/datalab/datalab_session/data_operations/data_operation.py +++ b/datalab/datalab_session/data_operations/data_operation.py @@ -1,9 +1,16 @@ from abc import ABC, abstractmethod import hashlib import json +import os +import tempfile from django.core.cache import cache +from fits2image.conversions import fits_to_jpg +from astropy.io import fits +import numpy as np + from datalab.datalab_session.tasks import execute_data_operation +from datalab.datalab_session.util import add_file_to_bucket, get_archive_from_basename CACHE_DURATION = 60 * 60 * 24 * 30 # cache for 30 days @@ -88,3 +95,64 @@ def set_output(self, output_data: dict): def get_output(self) -> dict: return cache.get(f'operation_{self.cache_key}_output') + + def create_add_thumbnails_to_bucket(self, hdu_list: fits.HDUList, percent=None, cur_percent=None) -> list: + if not type(hdu_list) == list: + hdu_list = [hdu_list] + + output = [] + total_files = len(hdu_list) + + # Create temp file paths for storing the products + fits_path = tempfile.NamedTemporaryFile(suffix=f'{self.cache_key}.fits').name + large_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{self.cache_key}-large.jpg').name + thumbnail_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{self.cache_key}-small.jpg').name + + for index, hdu in enumerate(hdu_list, start=1): + height, width = hdu[1].shape + + hdu.writeto(fits_path) 
+ fits_to_jpg(fits_path, large_jpg_path, width=width, height=height) + fits_to_jpg(fits_path, thumbnail_jpg_path) + + # Save Fits and Thumbnails in S3 Buckets + fits_url = add_file_to_bucket(f'{self.cache_key}/{self.cache_key}-{index}.fits', fits_path) + large_jpg_url = add_file_to_bucket(f'{self.cache_key}/{self.cache_key}-{index}-large.jpg', large_jpg_path) + thumbnail_jpg_url = add_file_to_bucket(f'{self.cache_key}/{self.cache_key}-{index}-small.jpg', thumbnail_jpg_path) + + output.append({'large_url': large_jpg_url, 'thumbnail_url': thumbnail_jpg_url}) + + if percent is not None and cur_percent is not None: + self.set_percent_completion(cur_percent + index/total_files * percent) + + return output + + def get_fits_npdata(self, input_files: list[dict], percent=None, cur_percent=None) -> list[np.memmap]: + total_files = len(input_files) + memmap_paths = [] + + # get the fits urls, download their file, extract the image data, and store in a list + with tempfile.TemporaryDirectory() as temp_dir: + for index, file_info in enumerate(input_files, start=1): + basename = file_info.get('basename', 'No basename found') + archive_record = get_archive_from_basename(basename) + + try: + fits_url = archive_record[0].get('url', 'No URL found') + except IndexError: + continue + + with fits.open(fits_url) as hdu_list: + data = hdu_list['SCI'].data + memmap_path = os.path.join(temp_dir, f'memmap_{index}.dat') + memmap_array = np.memmap(memmap_path, dtype=data.dtype, mode='w+', shape=data.shape) + memmap_array[:] = data[:] + memmap_paths.append(memmap_path) + + if percent is not None and cur_percent is not None: + self.set_percent_completion(cur_percent + index/total_files * percent) + + return [ + np.memmap(path, dtype=np.float32, mode='r', shape=memmap_array.shape) + for path in memmap_paths + ] diff --git a/datalab/datalab_session/data_operations/median.py b/datalab/datalab_session/data_operations/median.py index 99229fa..c7242de 100644 --- a/datalab/datalab_session/data_operations/median.py +++ b/datalab/datalab_session/data_operations/median.py @@ -1,11 +1,9 @@ import logging -import tempfile import numpy as np -from fits2image.conversions import fits_to_jpg from datalab.datalab_session.data_operations.data_operation import BaseDataOperation -from datalab.datalab_session.util import add_file_to_bucket, create_fits, stack_arrays, load_image_data_from_fits_urls +from datalab.datalab_session.util import create_fits, stack_arrays log = logging.getLogger() log.setLevel(logging.INFO) @@ -44,35 +42,18 @@ def operate(self, input_files, cache_key): log.info(f'Executing median operation on {len(input_files)} files') - image_data_list = load_image_data_from_fits_urls(input_files) - - self.set_percent_completion(0.4) + image_data_list = self.get_fits_npdata(input_files, percent=40.0, cur_percent=0.0) stacked_data = stack_arrays(image_data_list) + # using the numpy library's median method median = np.median(stacked_data, axis=2) hdu_list = create_fits(cache_key, median) - # Create the output files to be stored in S3 - fits_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}.fits').name - large_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}-large.jpg').name - thumbnail_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}-small.jpg').name - - hdu_list.writeto(fits_path) - fits_to_jpg(fits_path, large_jpg_path, width=median.shape[0], height=median.shape[1]) - fits_to_jpg(fits_path, thumbnail_jpg_path) - - self.set_percent_completion(0.7) - - # Save Fits and Thumbnails in S3 Buckets - fits_url 
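
The percent/cur_percent pair threaded through create_add_thumbnails_to_bucket and get_fits_npdata lets each long-running step claim a fixed slice of the operation's progress bar. A minimal standalone sketch of the weighting arithmetic (the function name and the 0-100 scale are illustrative assumptions, not part of the datalab codebase):

    def weighted_progress(cur_percent: float, index: int, total: int, percent: float) -> float:
        """Overall completion after finishing `index` of `total` items in a step
        that accounts for `percent` of the whole operation."""
        return cur_percent + (index / total) * percent

    # e.g. loading four files as the first 40% of an operation:
    for i in range(1, 5):
        print(weighted_progress(cur_percent=0.0, index=i, total=4, percent=40.0))
    # -> 10.0, 20.0, 30.0, 40.0
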
diff --git a/datalab/datalab_session/data_operations/median.py b/datalab/datalab_session/data_operations/median.py
index 99229fa..c7242de 100644
--- a/datalab/datalab_session/data_operations/median.py
+++ b/datalab/datalab_session/data_operations/median.py
@@ -1,11 +1,9 @@
 import logging
-import tempfile
 
 import numpy as np
-from fits2image.conversions import fits_to_jpg
 
 from datalab.datalab_session.data_operations.data_operation import BaseDataOperation
-from datalab.datalab_session.util import add_file_to_bucket, create_fits, stack_arrays, load_image_data_from_fits_urls
+from datalab.datalab_session.util import create_fits, stack_arrays
 
 log = logging.getLogger()
 log.setLevel(logging.INFO)
@@ -44,35 +42,18 @@ def operate(self, input_files, cache_key):
 
         log.info(f'Executing median operation on {len(input_files)} files')
 
-        image_data_list = load_image_data_from_fits_urls(input_files)
-
-        self.set_percent_completion(0.4)
+        image_data_list = self.get_fits_npdata(input_files, percent=40.0, cur_percent=0.0)
 
         stacked_data = stack_arrays(image_data_list)
 
+        # using the numpy library's median method
         median = np.median(stacked_data, axis=2)
 
         hdu_list = create_fits(cache_key, median)
 
-        # Create the output files to be stored in S3
-        fits_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}.fits').name
-        large_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}-large.jpg').name
-        thumbnail_jpg_path = tempfile.NamedTemporaryFile(suffix=f'{cache_key}-small.jpg').name
-
-        hdu_list.writeto(fits_path)
-        fits_to_jpg(fits_path, large_jpg_path, width=median.shape[0], height=median.shape[1])
-        fits_to_jpg(fits_path, thumbnail_jpg_path)
-
-        self.set_percent_completion(0.7)
-
-        # Save Fits and Thumbnails in S3 Buckets
-        fits_url = add_file_to_bucket(f'{cache_key}/{cache_key}.fits', fits_path)
-        large_jpg_url = add_file_to_bucket(f'{cache_key}/{cache_key}-large.jpg', large_jpg_path)
-        thumbnail_jpg_url = add_file_to_bucket(f'{cache_key}/{cache_key}-small.jpg', thumbnail_jpg_path)
-
-        self.set_percent_completion(0.9)
+        output = self.create_add_thumbnails_to_bucket(hdu_list, percent=60.0, cur_percent=40.0)
 
-        output = {'output_files': [large_jpg_url, thumbnail_jpg_url]}
+        output = {'output_files': output}
 
         log.info(f'Median operation output: {output}')
         self.set_percent_completion(1)
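
The median hunk above boils down to a small amount of numpy: crop the input frames to a common shape, stack them along a new third axis, and take the per-pixel median. A self-contained sketch of that pipeline, with random arrays standing in for the FITS image data:

    import numpy as np

    frames = [np.random.rand(102, 100), np.random.rand(100, 103), np.random.rand(101, 100)]

    # crop every frame to the smallest common shape so np.stack succeeds
    min_shape = np.min([frame.shape for frame in frames], axis=0)
    cropped = [frame[:min_shape[0], :min_shape[1]] for frame in frames]

    stacked = np.stack(cropped, axis=2)   # shape (100, 100, 3)
    median = np.median(stacked, axis=2)   # per-pixel median, shape (100, 100)

The crop-then-stack step mirrors what util.py's stack_arrays does before np.median collapses the z axis.
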
- """ - memmap_paths = [] - - with tempfile.TemporaryDirectory() as temp_dir: - for index, file_info in enumerate(input_files): - basename = file_info.get('basename', 'No basename found') - archive_record = get_archive_from_basename(basename) - - try: - fits_url = archive_record[0].get('url', 'No URL found') - except IndexError: - continue - - with fits.open(fits_url) as hdu_list: - data = hdu_list['SCI'].data - memmap_path = os.path.join(temp_dir, f'memmap_{index}.dat') - memmap_array = np.memmap(memmap_path, dtype=data.dtype, mode='w+', shape=data.shape) - memmap_array[:] = data[:] - memmap_paths.append(memmap_path) - - return [ - np.memmap(path, dtype=np.float32, mode='r', shape=memmap_array.shape) - for path in memmap_paths - ] diff --git a/poetry.lock b/poetry.lock index c9f43c3..960dc90 100644 --- a/poetry.lock +++ b/poetry.lock @@ -99,6 +99,49 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] +[[package]] +name = "boto3" +version = "1.34.77" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.77-py3-none-any.whl", hash = "sha256:7abd327980258ec2ae980d2ff7fc32ede7448146b14d34c56bf0be074e2a149b"}, + {file = "boto3-1.34.77.tar.gz", hash = "sha256:8ebed4fa5a3b84dd4037f28226985af00e00fb860d739fc8b1ed6381caa4b330"}, +] + +[package.dependencies] +botocore = ">=1.34.77,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.77" +description = "Low-level, data-driven core of boto 3." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.77-py3-none-any.whl", hash = "sha256:6d6a402032ca0b89525212356a865397f8f2839683dd53d41b8cee1aa84b2b4b"}, + {file = "botocore-1.34.77.tar.gz", hash = "sha256:6dab60261cdbfb7d0059488ea39408d5522fad419c004ba5db3484e6df854ea8"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, +] + +[package.extras] +crt = ["awscrt (==0.19.19)"] + [[package]] name = "certifi" version = "2024.2.2" @@ -605,6 +648,18 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "numpy" version = "1.26.4" @@ -960,6 +1015,21 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", 
hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pytz" version = "2024.1" @@ -1088,6 +1158,36 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +category = "main" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sqlparse" version = "0.4.4" @@ -1143,14 +1243,31 @@ files = [ [[package]] name = "urllib3" -version = "2.2.0" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "urllib3" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1162,4 +1279,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "ae7148f25a2f32f5bd8c6fcc8b5eab56a85443598f4edca498bef8ebb834bced" +content-hash = "174ca357a88b4c18fd49e5da372bda4200f06bcc4ca294c6ba3119b1d866c27c" diff --git a/pyproject.toml b/pyproject.toml index ffaa207..0936b9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ hiredis = "^2.3.2" numpy = "^1.26.4" astropy = "^6.0.1" fits2image = "^0.4.7" +boto3 = "^1.34.77" [tool.poetry.group.dev.dependencies] pytest = "^7.4.3" From 4b6640cb7e09c850231a91a86cd2b1ecb4798c0d Mon Sep 17 00:00:00 2001 From: Lloyd Dakin Date: Tue, 9 Apr 2024 11:48:01 -0700 Subject: [PATCH 5/5] function and comment tweak --- datalab/datalab_session/data_operations/data_operation.py | 4 +++- datalab/datalab_session/data_operations/median.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/datalab/datalab_session/data_operations/data_operation.py b/datalab/datalab_session/data_operations/data_operation.py index c9f4256..a9aa37a 100644 --- a/datalab/datalab_session/data_operations/data_operation.py +++ b/datalab/datalab_session/data_operations/data_operation.py @@ -96,7 +96,9 @@ def set_output(self, output_data: dict): def get_output(self) -> dict: return cache.get(f'operation_{self.cache_key}_output') - def create_add_thumbnails_to_bucket(self, hdu_list: fits.HDUList, percent=None, cur_percent=None) -> list: + # percent lets you alocate a fraction of the operation that this takes up in time + # cur_percent is the current completion of the operation + def create_and_store_fits(self, hdu_list: fits.HDUList, percent=None, cur_percent=None) -> list: if not type(hdu_list) == list: hdu_list = [hdu_list] diff --git a/datalab/datalab_session/data_operations/median.py b/datalab/datalab_session/data_operations/median.py index c7242de..e67e286 100644 --- a/datalab/datalab_session/data_operations/median.py +++ b/datalab/datalab_session/data_operations/median.py @@ -51,7 +51,7 @@ def operate(self, input_files, cache_key): hdu_list = create_fits(cache_key, median) - output = self.create_add_thumbnails_to_bucket(hdu_list, percent=60.0, cur_percent=40.0) + output = self.create_and_store_fits(hdu_list, percent=60.0, cur_percent=40.0) output = {'output_files': output}