Skip to content

Commit 0b22063

Browse files
committed
gwy: squashing #188: capture uploads via web UI
1 parent 90b0dec commit 0b22063

File tree

17 files changed

+2011
-105
lines changed

17 files changed

+2011
-105
lines changed

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,21 +31,21 @@ repos:
3131
# biome-format Format the committed files
3232
# biome-lint Lint and apply safe fixes to the committed files
3333
- repo: https://github.com/biomejs/pre-commit
34-
rev: v2.2.2
34+
rev: v2.2.4
3535
hooks:
3636
- id: biome-check
3737
additional_dependencies: ["@biomejs/biome@^1.0.0"]
3838

3939
# automatically upgrades Django code to migrate patterns and avoid deprecation warnings
4040
- repo: https://github.com/adamchainz/django-upgrade
41-
rev: "1.27.0"
41+
rev: "1.28.0"
4242
hooks:
4343
- id: django-upgrade
4444
args: ["--target-version", "4.2"]
4545

4646
# runs the ruff linter and formatter
4747
- repo: https://github.com/astral-sh/ruff-pre-commit
48-
rev: v0.12.11
48+
rev: v0.13.0
4949
hooks:
5050
# linter
5151
- id: ruff # runs ruff check --force-exclude

gateway/config/settings/base.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -507,3 +507,16 @@ def __get_random_token(length: int) -> str:
507507
"SDS_NEW_USERS_APPROVED_ON_CREATION",
508508
default=False,
509509
)
510+
511+
# File upload limits
512+
# ------------------------------------------------------------------------------
513+
# Maximum number of files that can be uploaded at once
514+
DATA_UPLOAD_MAX_NUMBER_FILES: int = env.int(
515+
"DATA_UPLOAD_MAX_NUMBER_FILES", default=1000
516+
)
517+
518+
# Maximum memory size for file uploads (default: 2.5MB, increased to 100MB)
519+
DATA_UPLOAD_MAX_MEMORY_SIZE: int = env.int(
520+
"DATA_UPLOAD_MAX_MEMORY_SIZE",
521+
default=104857600, # 100MB
522+
)
Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,146 @@
1+
from http import HTTPStatus
2+
3+
from rest_framework import status
4+
from rest_framework.parsers import MultiPartParser
5+
from rest_framework.request import Request
6+
from rest_framework.test import APIRequestFactory
7+
8+
from sds_gateway.api_methods.views.capture_endpoints import CaptureViewSet
9+
from sds_gateway.api_methods.views.file_endpoints import CheckFileContentsExistView
10+
from sds_gateway.api_methods.views.file_endpoints import FileViewSet
11+
12+
13+
def upload_file_helper_simple(request, file_data):
    """Upload a single file by dispatching to ``FileViewSet.create`` in-process.

    Builds a synthetic multipart POST from ``file_data`` (which should contain
    all required fields: name, directory, file, media_type, etc.), attaches the
    calling user, and invokes the DRF view directly — no HTTP round trip.

    Args:
        request: The incoming request; only ``request.path`` and
            ``request.user`` are read from it.
        file_data: Mapping of multipart form fields for the file upload.

    Returns:
        A ``(responses, errors)`` pair: ``([response], [])`` on success,
        ``([], [error])`` on failure. A 409 (file already exists) is reported
        in ``errors`` with the raw response payload so callers can surface it
        as a warning.
    """
    factory = APIRequestFactory()
    django_request = factory.post(
        request.path,
        file_data,
        format="multipart",
    )
    django_request.user = request.user
    drf_request = Request(django_request, parsers=[MultiPartParser()])
    drf_request.user = request.user
    # Configure the viewset by hand since we bypass the URL router.
    view = FileViewSet()
    view.request = drf_request
    view.action = "create"
    view.format_kwarg = None
    view.args = ()
    view.kwargs = {}
    try:
        response = view.create(drf_request)
    except (ValueError, TypeError, AttributeError, KeyError) as e:
        return [], [f"Data validation error: {e}"]
    else:
        responses = []
        errors = []

        if not hasattr(response, "status_code"):
            # Not a normal DRF response; report whatever payload we can find.
            errors.append(getattr(response, "data", str(response)))
        else:
            http_status = HTTPStatus(response.status_code)
            response_data = getattr(response, "data", str(response))

            if http_status.is_success:
                responses.append(response)
            elif response.status_code == status.HTTP_409_CONFLICT:
                # Already exists, treat as warning
                errors.append(response_data)
            elif http_status.is_server_error:
                # Include status code and payload, consistent with
                # create_capture_helper_simple, instead of a bare message.
                errors.append(f"Server error ({response.status_code}): {response_data}")
            elif http_status.is_client_error:
                # Handle 4xx client errors
                errors.append(f"Client error ({response.status_code}): {response_data}")
            else:
                # Handle any other status codes (1xx/3xx)
                errors.append(response_data)

        return responses, errors
65+
66+
67+
# TODO: Use this helper method when implementing the file upload mode multiplexer.
def check_file_contents_exist_helper(request, check_data):
    """Invoke ``CheckFileContentsExistView.post`` directly with the given data.

    ``check_data`` should contain the required fields: directory, name,
    sum_blake3, etc. Only ``request.path`` and ``request.user`` are read from
    the incoming request.
    """
    raw_request = APIRequestFactory().post(
        request.path,  # or a specific path for the check endpoint
        check_data,
        format="multipart",
    )
    raw_request.user = request.user

    wrapped = Request(raw_request, parsers=[MultiPartParser()])
    wrapped.user = request.user

    # Wire up the view manually since we are not going through the router.
    view = CheckFileContentsExistView()
    view.request = wrapped
    view.action = None
    view.format_kwarg = None
    view.args = ()
    view.kwargs = {}

    return view.post(wrapped)
90+
91+
92+
def create_capture_helper_simple(request, capture_data):
    """Create a capture by dispatching to ``CaptureViewSet.create`` in-process.

    capture_data should contain all required fields for capture creation:
    owner, top_level_dir, capture_type, channel, index_name, etc.
    Returns ([response], []) for success, ([], [error]) for error, and handles
    409 as a warning.
    """
    raw_request = APIRequestFactory().post(
        request.path,
        capture_data,
        format="multipart",
    )
    raw_request.user = request.user

    wrapped = Request(raw_request, parsers=[MultiPartParser()])
    wrapped.user = request.user

    # Configure the viewset by hand since we bypass the URL router.
    view = CaptureViewSet()
    view.request = wrapped
    view.action = "create"
    view.format_kwarg = None
    view.args = ()
    view.kwargs = {}
    # Set the context for the serializer
    view.get_serializer_context = lambda: {"request_user": request.user}

    try:
        response = view.create(wrapped)
    except (ValueError, TypeError, AttributeError, KeyError) as e:
        return [], [f"Data validation error: {e}"]

    if not hasattr(response, "status_code"):
        # Not a normal DRF response; report whatever payload we can find.
        return [], [getattr(response, "data", str(response))]

    response_data = getattr(response, "data", str(response))
    http_status = HTTPStatus(response.status_code)

    if http_status.is_success:
        return [response], []
    if response.status_code == status.HTTP_409_CONFLICT:
        # Already exists, treat as warning
        return [], [response_data]
    if http_status.is_server_error:
        # Handle 500 and other server errors
        return [], [f"Server error ({response.status_code}): {response_data}"]
    if http_status.is_client_error:
        # Handle 4xx client errors
        return [], [f"Client error ({response.status_code}): {response_data}"]
    # Handle any other status codes
    return [], [response_data]

gateway/sds_gateway/api_methods/serializers/file_serializers.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,6 @@ def check_file_contents_exist(
219219
user=user,
220220
)
221221

222-
log.debug(f"Checking file contents for user in directory: {safe_dir}")
223222
identical_file: File | None = identical_user_owned_file.filter(
224223
directory=safe_dir,
225224
name=name,
@@ -242,14 +241,12 @@ def check_file_contents_exist(
242241
user_mutable_attributes_differ = True
243242
break
244243

245-
payload = {
244+
return {
246245
"file_exists_in_tree": identical_file is not None,
247246
"file_contents_exist_for_user": file_contents_exist_for_user,
248247
"user_mutable_attributes_differ": user_mutable_attributes_differ,
249248
"asset_id": asset.uuid if asset else None,
250249
}
251-
log.debug(payload)
252-
return payload
253250

254251

255252
class FileCheckResponseSerializer(serializers.Serializer[File]):

gateway/sds_gateway/api_methods/tasks.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -758,7 +758,7 @@ def _process_item_files(
758758
# Get available space for logging
759759
media_root = Path(settings.MEDIA_ROOT)
760760
try:
761-
total, used, free = shutil.disk_usage(media_root)
761+
_total, _used, free = shutil.disk_usage(media_root)
762762
available_space = free - DISK_SPACE_BUFFER
763763
except (OSError, ValueError):
764764
available_space = 0

gateway/sds_gateway/api_methods/tests/test_capture_endpoints.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1594,7 +1594,7 @@ def setUp(self) -> None:
15941594
password="testpassword", # noqa: S106
15951595
is_approved=True,
15961596
)
1597-
api_key, key = UserAPIKey.objects.create_key(
1597+
_api_key, key = UserAPIKey.objects.create_key(
15981598
name="test-key",
15991599
user=self.user,
16001600
)

gateway/sds_gateway/api_methods/utils/disk_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def check_disk_space_available(
2929
directory = Path(settings.MEDIA_ROOT)
3030

3131
try:
32-
total, used, free = shutil.disk_usage(directory)
32+
_total, _used, free = shutil.disk_usage(directory)
3333
available_space = free - DISK_SPACE_BUFFER
3434
except (OSError, ValueError) as e:
3535
logger.error(f"Error checking disk space for {directory}: {e}")

gateway/sds_gateway/api_methods/utils/metadata_schemas.py

Lines changed: 32 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
1-
# ruff: noqa: E501
21
# for full schema definition, see https://github.com/spectrumx/schema-definitions/blob/master/definitions/sds/metadata-formats/digital-rf/README.md
32
# the mapping below is used for drf capture metadata parsing in extract_drf_metadata.py
43

54
import logging
5+
from typing import TYPE_CHECKING
66
from typing import Any
77

8-
from sds_gateway.api_methods.models import CaptureType
8+
if TYPE_CHECKING:
9+
from sds_gateway.api_methods.models import CaptureType
910

1011
log = logging.getLogger(__name__)
1112

@@ -85,23 +86,27 @@
8586
},
8687
"init_utc_timestamp": {
8788
"type": int,
88-
"description": "UTC timestamp of each restart of the recorder; needed if leap seconds correction applied.",
89+
"description": "UTC timestamp of each restart of the recorder; needed if "
90+
"leap seconds correction applied.",
8991
},
9092
"computer_time": {
9193
"type": int,
92-
"description": "Computer time at creation of individual RF file (unix time).",
94+
"description": "Computer time at creation of "
95+
"individual RF file (unix time).",
9396
},
9497
"uuid_str": {
9598
"type": str,
96-
"description": "UUID of the capture; set independently at each restart of the recorder.",
99+
"description": "UUID of the capture; set independently "
100+
"at each restart of the recorder.",
97101
},
98102
"center_freq": {
99103
"type": int,
100104
"description": "The center frequency of the capture.",
101105
},
102106
"center_frequencies": {
103107
"type": list[float],
104-
"description": "The center frequencies (one per subchannel) of the capture.",
108+
"description": "The center frequencies (one per subchannel) "
109+
"of the capture.",
105110
},
106111
"span": {
107112
"type": int,
@@ -357,10 +362,8 @@
357362
"capture_props",
358363
]
359364

360-
capture_index_mapping_by_type: dict[CaptureType, dict[str, dict[str, Any]]] = {
361-
CaptureType.DigitalRF: drf_capture_index_mapping,
362-
CaptureType.RadioHound: rh_capture_index_mapping,
363-
}
365+
# This will be populated at runtime to avoid circular imports
366+
capture_index_mapping_by_type = {}
364367

365368
base_properties = {
366369
"channel": {"type": "keyword"},
@@ -387,9 +390,20 @@
387390

388391

389392
def get_mapping_by_capture_type(
390-
capture_type: CaptureType,
393+
capture_type: Any,
391394
) -> dict[str, str | dict[str, Any]]:
392395
"""Get the mapping for a given capture type."""
396+
# Local import to avoid circular dependency
397+
from sds_gateway.api_methods.models import CaptureType
398+
399+
# Initialize mapping if not already done
400+
if not capture_index_mapping_by_type:
401+
capture_index_mapping_by_type.update(
402+
{
403+
CaptureType.DigitalRF: drf_capture_index_mapping,
404+
CaptureType.RadioHound: rh_capture_index_mapping,
405+
}
406+
)
393407

394408
return {
395409
"properties": {
@@ -406,14 +420,17 @@ def get_mapping_by_capture_type(
406420
}
407421

408422

409-
def infer_index_name(capture_type: CaptureType) -> str:
423+
def infer_index_name(capture_type: "CaptureType") -> str:
410424
"""Infer the index name for a given capture."""
411-
# Populate index_name based on capture type
425+
# Local import to avoid circular dependency
426+
from sds_gateway.api_methods.models import CaptureType
427+
428+
# Handle enum inputs (strings match fine against StrEnum)
412429
match capture_type:
413430
case CaptureType.DigitalRF:
414-
return f"captures-{CaptureType.DigitalRF}"
431+
return f"captures-{capture_type.value}"
415432
case CaptureType.RadioHound:
416-
return f"captures-{CaptureType.RadioHound}"
433+
return f"captures-{capture_type.value}"
417434
case _:
418435
msg = f"Invalid capture type: {capture_type}"
419436
log.error(msg)

0 commit comments

Comments
 (0)