
Commit 706ee4b

Andrei Neagu committed
reshuffled imports
1 parent 3873976 commit 706ee4b

File tree

6 files changed: +12 -13 lines changed


packages/aws-library/src/aws_library/s3/_client.py

Lines changed: 1 addition & 2 deletions

@@ -21,7 +21,6 @@
 from servicelib.utils import limited_gather
 from servicelib.zip_stream import (
     DEFAULT_READ_CHUNK_SIZE,
-    MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
     FileSize,
     FileStream,
     FileStreamCallable,
@@ -31,7 +30,7 @@
 from types_aiobotocore_s3.literals import BucketLocationConstraintType
 from types_aiobotocore_s3.type_defs import ObjectIdentifierTypeDef
 
-from ._constants import MULTIPART_COPY_THRESHOLD
+from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
 from ._error_handler import s3_exception_handler, s3_exception_handler_async_gen
 from ._errors import S3DestinationNotEmptyError, S3KeyNotFoundError
 from ._models import (

packages/aws-library/src/aws_library/s3/_constants.py

Lines changed: 3 additions & 3 deletions

@@ -6,9 +6,9 @@
 MIN_MULTIPART_UPLOAD_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python(
     "5MiB"
 )
-MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
-    "100MiB"
-)
+MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
+    ByteSize
+).validate_python("100MiB")
 
 PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB")
 S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB")
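
For reference, a minimal standalone sketch (not part of the repository) of how the pydantic v2 TypeAdapter pattern used above resolves human-readable size strings into integer byte counts; the constant names mirror the diff, everything else is illustrative.

from typing import Final

from pydantic import ByteSize, TypeAdapter

# Same pattern as in _constants.py above: parse a size string into a
# ByteSize (an int subclass holding the number of bytes).
MIN_MULTIPART_UPLOAD_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python(
    "5MiB"
)
MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
    ByteSize
).validate_python("100MiB")

assert MIN_MULTIPART_UPLOAD_CHUNK_SIZE == 5 * 1024**2      # 5_242_880 bytes
assert MULTIPART_UPLOADS_MIN_TOTAL_SIZE == 100 * 1024**2   # 104_857_600 bytes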

packages/aws-library/tests/test_s3_client.py

Lines changed: 1 addition & 1 deletion

@@ -24,6 +24,7 @@
 import pytest
 from aiohttp import ClientSession
 from aws_library.s3._client import S3ObjectKey, SimcoreS3API
+from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
 from aws_library.s3._errors import (
     S3BucketInvalidError,
     S3DestinationNotEmptyError,
@@ -59,7 +60,6 @@
 from servicelib.progress_bar import ProgressBarData
 from servicelib.utils import limited_as_completed
 from servicelib.zip_stream import (
-    MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
     ArchiveEntries,
     DiskStreamReader,
     get_zip_archive_stream,

packages/service-library/src/servicelib/zip_stream/__init__.py

Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
-from ._constants import DEFAULT_READ_CHUNK_SIZE, MULTIPART_UPLOADS_MIN_TOTAL_SIZE
+from ._constants import DEFAULT_READ_CHUNK_SIZE, MIN_MULTIPART_UPLOAD_CHUNK_SIZE
 from ._input import DiskStreamReader
 from ._output import DiskStreamWriter
 from ._types import (
@@ -20,5 +20,5 @@
     "FileStream",
     "FileStreamCallable",
     "get_zip_archive_stream",
-    "MULTIPART_UPLOADS_MIN_TOTAL_SIZE",
+    "MIN_MULTIPART_UPLOAD_CHUNK_SIZE",
 )

packages/service-library/src/servicelib/zip_stream/_constants.py

Lines changed: 3 additions & 3 deletions

@@ -3,6 +3,6 @@
 from pydantic import ByteSize, TypeAdapter
 
 DEFAULT_READ_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("1MiB")
-MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
-    ByteSize
-).validate_python("100MiB")
+MIN_MULTIPART_UPLOAD_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python(
+    "5MiB"
+)

packages/service-library/src/servicelib/zip_stream/_zipper.py

Lines changed: 2 additions & 2 deletions

@@ -5,7 +5,7 @@
 from stream_zip import ZIP_32, AsyncMemberFile, async_stream_zip
 
 from ..progress_bar import ProgressBarData
-from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
+from ._constants import MIN_MULTIPART_UPLOAD_CHUNK_SIZE
 from ._types import ArchiveEntries, FileSize, FileStream
 
 
@@ -26,7 +26,7 @@ async def get_zip_archive_stream(
     archive_files: ArchiveEntries,
     *,
     progress_bar: ProgressBarData | None = None,
-    chunk_size: int = MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
+    chunk_size: int = MIN_MULTIPART_UPLOAD_CHUNK_SIZE,
 ) -> FileStream:
     # NOTE: this is CPU bound task, even though the loop is not blocked,
     # the CPU is still used for compressing the content.
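
With this change the default chunk_size of get_zip_archive_stream drops from 100 MiB to 5 MiB. Below is a hedged usage sketch; it assumes FileStream is an async iterable of bytes and that get_zip_archive_stream is an async generator consumed directly, and the entries value stands in for an ArchiveEntries built elsewhere. None of these details are confirmed by this diff.

from pathlib import Path

from servicelib.zip_stream import (
    MIN_MULTIPART_UPLOAD_CHUNK_SIZE,
    get_zip_archive_stream,
)


async def dump_archive(entries, target: Path) -> None:
    # Assumption: the returned FileStream can be consumed with `async for`,
    # yielding compressed chunks sized by `chunk_size` (now 5 MiB by default
    # instead of 100 MiB). If the function is a plain coroutine rather than
    # an async generator, it would need to be awaited first.
    stream = get_zip_archive_stream(entries, chunk_size=MIN_MULTIPART_UPLOAD_CHUNK_SIZE)
    with target.open("wb") as f:
        async for chunk in stream:
            f.write(chunk)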
