s3_multipart_upload.py
# https://medium.com/@niyazi_erd/aws-s3-multipart-upload-with-python-and-boto3-9d2a0ef9b085
import os
import sys
import threading

import boto3
from boto3.s3.transfer import TransferConfig

BUCKET_NAME = "YOUR_BUCKET_NAME"

s3 = boto3.resource('s3')


class ProgressPercentage(object):
    """Callback that prints upload progress for a single file."""

    def __init__(self, filename):
        self._filename = filename
        self._size = float(os.path.getsize(filename))
        self._seen_so_far = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        # To simplify, we'll assume this is hooked up to a single filename.
        with self._lock:
            self._seen_so_far += bytes_amount
            percentage = (self._seen_so_far / self._size) * 100
            sys.stdout.write(
                "\r%s  %s / %s  (%.2f%%)" % (
                    self._filename, self._seen_so_far, self._size,
                    percentage))
            sys.stdout.flush()


def multi_part_upload_with_s3():
    # Multipart upload: files larger than multipart_threshold are split into
    # multipart_chunksize parts and uploaded concurrently.
    # Note: 1024 * 25 is 25 KiB; S3 requires non-final parts of at least 5 MiB,
    # so boto3 adjusts the chunk size upward. Use 1024 * 1024 * 25 for 25 MiB parts.
    config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                            multipart_chunksize=1024 * 25, use_threads=True)
    file_path = os.path.join(os.path.dirname(__file__), 'largefile.pdf')
    key_path = 'multipart_files/largefile.pdf'
    s3.meta.client.upload_file(file_path, BUCKET_NAME, key_path,
                               ExtraArgs={'ACL': 'public-read',
                                          'ContentType': 'application/pdf'},
                               Config=config,
                               Callback=ProgressPercentage(file_path))


if __name__ == '__main__':
    multi_part_upload_with_s3()
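
# Optional sanity check (a minimal sketch, not part of the original script):
# after the upload finishes, a head_object call against the same BUCKET_NAME
# and key confirms the object exists and reports its size and content type.
def verify_upload(key_path='multipart_files/largefile.pdf'):
    response = s3.meta.client.head_object(Bucket=BUCKET_NAME, Key=key_path)
    print("Uploaded %s: %d bytes, %s" % (
        key_path, response['ContentLength'], response['ContentType']))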