6 files changed (+12, -13 lines), including the service-library/src/servicelib/zip_stream package.

@@ -21,7 +21,6 @@
 from servicelib.utils import limited_gather
 from servicelib.zip_stream import (
     DEFAULT_READ_CHUNK_SIZE,
-    MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
     FileSize,
     FileStream,
     FileStreamCallable,
@@ -31,7 +30,7 @@
 from types_aiobotocore_s3.literals import BucketLocationConstraintType
 from types_aiobotocore_s3.type_defs import ObjectIdentifierTypeDef
 
-from ._constants import MULTIPART_COPY_THRESHOLD
+from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
 from ._error_handler import s3_exception_handler, s3_exception_handler_async_gen
 from ._errors import S3DestinationNotEmptyError, S3KeyNotFoundError
 from ._models import (
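How the S3 client consumes the renamed 100 MiB constant is not shown in this excerpt, so the following is only an illustrative sketch of how such a threshold is typically wired into S3 transfers, using boto3's TransferConfig (boto3 is used here purely for illustration; the bucket name and file names are placeholders):

# Sketch only: a size threshold of this kind usually decides when to switch
# from a single PUT to a multipart transfer.
import boto3
from boto3.s3.transfer import TransferConfig

MULTIPART_UPLOADS_MIN_TOTAL_SIZE = 100 * 1024**2  # 100 MiB, same value as in _constants

transfer_config = TransferConfig(
    multipart_threshold=MULTIPART_UPLOADS_MIN_TOTAL_SIZE,  # use multipart above this size
    multipart_chunksize=5 * 1024**2,  # 5 MiB parts, the S3 minimum part size
)

s3 = boto3.client("s3")
# s3.upload_file("local.bin", "my-bucket", "remote.bin", Config=transfer_config)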
@@ -6,9 +6,9 @@
 MIN_MULTIPART_UPLOAD_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python(
     "5MiB"
 )
-MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
-    "100MiB"
-)
+MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
+    ByteSize
+).validate_python("100MiB")
 
 PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB")
 S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB")
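These values track S3's documented limits: multipart parts (other than the last one) must be at least 5 MiB, a single PUT upload tops out around 5 GB, and an object at 5 TB. A minimal sketch of how pydantic's ByteSize turns the human-readable strings above into byte counts (the assertions are illustrative; "MiB" is interpreted as a binary unit):

from typing import Final

from pydantic import ByteSize, TypeAdapter

# Same parsing pattern as the constants above; 1 MiB = 1024**2 bytes.
MIN_MULTIPART_UPLOAD_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("5MiB")
MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("100MiB")

assert MIN_MULTIPART_UPLOAD_CHUNK_SIZE == 5 * 1024**2       # 5_242_880 bytes
assert MULTIPART_UPLOADS_MIN_TOTAL_SIZE == 100 * 1024**2    # 104_857_600 bytes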
@@ -24,6 +24,7 @@
 import pytest
 from aiohttp import ClientSession
 from aws_library.s3._client import S3ObjectKey, SimcoreS3API
+from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
 from aws_library.s3._errors import (
     S3BucketInvalidError,
     S3DestinationNotEmptyError,
@@ -59,7 +60,6 @@
 from servicelib.progress_bar import ProgressBarData
 from servicelib.utils import limited_as_completed
 from servicelib.zip_stream import (
-    MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
     ArchiveEntries,
     DiskStreamReader,
     get_zip_archive_stream,
@@ -1,4 +1,4 @@
-from ._constants import DEFAULT_READ_CHUNK_SIZE, MULTIPART_UPLOADS_MIN_TOTAL_SIZE
+from ._constants import DEFAULT_READ_CHUNK_SIZE, MIN_MULTIPART_UPLOAD_CHUNK_SIZE
 from ._input import DiskStreamReader
 from ._output import DiskStreamWriter
 from ._types import (
@@ -20,5 +20,5 @@
     "FileStream",
     "FileStreamCallable",
     "get_zip_archive_stream",
-    "MULTIPART_UPLOADS_MIN_TOTAL_SIZE",
+    "MIN_MULTIPART_UPLOAD_CHUNK_SIZE",
 )
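For downstream code this rename means the 100 MiB threshold is now imported from aws_library while servicelib.zip_stream exports the 5 MiB chunk size instead; the test file above already makes exactly this switch. In short:

# Before this PR
from servicelib.zip_stream import MULTIPART_UPLOADS_MIN_TOTAL_SIZE

# After this PR
from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE  # 100 MiB threshold
from servicelib.zip_stream import MIN_MULTIPART_UPLOAD_CHUNK_SIZE       # 5 MiB chunk size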
@@ -3,6 +3,6 @@
 from pydantic import ByteSize, TypeAdapter
 
 DEFAULT_READ_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("1MiB")
-MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
-    ByteSize
-).validate_python("100MiB")
+MIN_MULTIPART_UPLOAD_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python(
+    "5MiB"
+)
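After this change zip_stream's own constants are the 1 MiB read chunk size and the 5 MiB minimum multipart chunk size; the 100 MiB total-size threshold lives in aws_library. As a rough, standard-library-only sketch of the kind of chunked reading DEFAULT_READ_CHUNK_SIZE is meant for (the real DiskStreamReader API is not shown in this diff and may differ):

from collections.abc import AsyncIterator
from pathlib import Path

_READ_CHUNK_SIZE = 1 * 1024**2  # 1 MiB, same value as DEFAULT_READ_CHUNK_SIZE above


async def read_in_chunks(path: Path, chunk_size: int = _READ_CHUNK_SIZE) -> AsyncIterator[bytes]:
    # Naive illustration: the reads here block the event loop; a real implementation
    # would use an async file API or a thread executor.
    with path.open("rb") as file:
        while chunk := file.read(chunk_size):
            yield chunk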
@@ -5,7 +5,7 @@
 from stream_zip import ZIP_32, AsyncMemberFile, async_stream_zip
 
 from ..progress_bar import ProgressBarData
-from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
+from ._constants import MIN_MULTIPART_UPLOAD_CHUNK_SIZE
 from ._types import ArchiveEntries, FileSize, FileStream
 
 
@@ -26,7 +26,7 @@ async def get_zip_archive_stream(
     archive_files: ArchiveEntries,
     *,
     progress_bar: ProgressBarData | None = None,
-    chunk_size: int = MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
+    chunk_size: int = MIN_MULTIPART_UPLOAD_CHUNK_SIZE,
 ) -> FileStream:
     # NOTE: this is CPU bound task, even though the loop is not blocked,
     # the CPU is still used for compressing the content.
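The archive stream's default chunk size therefore drops from 100 MiB to 5 MiB, so each yielded chunk is already large enough to be sent directly as an S3 multipart part (S3 rejects parts smaller than 5 MiB except the last one). A hypothetical consumer, assuming get_zip_archive_stream is an async generator yielding bytes as its FileStream return type suggests; dump_archive is an illustrative helper, not code from this PR:

from pathlib import Path

from servicelib.zip_stream import ArchiveEntries, get_zip_archive_stream


async def dump_archive(archive_files: ArchiveEntries, destination: Path) -> None:
    # chunk_size defaults to MIN_MULTIPART_UPLOAD_CHUNK_SIZE (5 MiB) after this change
    with destination.open("wb") as out:
        async for chunk in get_zip_archive_stream(archive_files):
            out.write(chunk)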