@@ -3,16 +3,16 @@ import {
 	DeleteObjectsCommand,
 	GetObjectCommand,
 	ListObjectsV2Command,
-	S3Client,
+	S3Client
 } from "@aws-sdk/client-s3";
 import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
 import { Drive } from "@prisma/client";
-import { cryptoHexEncodedHash256, cryptoMd5Method, signRequest } from "@util/helpers/s3-helpers";
-import { DriveFile, DriveFolder, UploadingFile } from "@util/types";
-import Evaporate from "evaporate";
+import { calculateVariablePartSize } from "@util/helpers/s3-helpers";
+import { DriveFile, DriveFolder, Provider, UploadingFile } from "@util/types";
+import { Upload } from "@util/upload";
 import mime from "mime-types";
 import { nanoid } from "nanoid";
-import { createContext, useContext, useEffect, useState } from "react";
+import { createContext, useContext, useEffect, useRef, useState } from "react";
 import toast from "react-hot-toast";
 import { ContextValue, ROOT_FOLDER } from "./useBucket";
 import useUser from "./useUser";
@@ -31,6 +31,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 			region: data.keys.region,
 			maxAttempts: 1,
 			credentials: { accessKeyId: data.keys.accessKey, secretAccessKey: data.keys.secretKey },
+			...(data.keys?.endpoint ? { endpoint: data.keys.endpoint } : {}),
 		})
 	);
 	const [loading, setLoading] = useState(false);
@@ -40,6 +41,24 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 	const [folders, setFolders] = useState<DriveFolder[]>(null);
 	const [uploadingFiles, setUploadingFiles] = useState<UploadingFile[]>([]);
 	const [files, setFiles] = useState<DriveFile[]>(null);
+	const isMounted = useRef(false);
+
+	// Fallback for old buckets not already having the bucketUrl.
+	useEffect(() => {
+		if (isMounted.current || !data?.keys) return;
+		isMounted.current = true;
+		if (data.keys.bucketUrl) return;
+
+		if ((Provider[data.type] as Provider) === Provider.s3) {
+			data.keys.bucketUrl = `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`;
+		} else if ((Provider[data.type] as Provider) === Provider.backblaze) {
+			data.keys.bucketUrl = `https://${data.keys.Bucket}.s3.${data.keys.region}.backblazeb2.com`;
+		}
+
+		return () => {
+			isMounted.current = false;
+		};
+	}, [data]);
 
 	const addFolder = (name: string) => {
 		const path =
@@ -53,7 +72,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 			parent: currentFolder.fullPath,
 			createdAt: new Date().toISOString(),
 			bucketName: data.keys.Bucket,
-			bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
+			bucketUrl: data.keys.bucketUrl,
 		};
 
 		setFolders((folders) => [...folders, newFolder]);
@@ -79,112 +98,113 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 	};
 
 	const addFile = async (filesToUpload: File[] | FileList) => {
-		const evaporate = await Evaporate.create({
-			bucket: data.keys.Bucket,
-			awsRegion: data.keys.region,
-			aws_key: data.keys.accessKey,
-			computeContentMd5: true,
-			cryptoMd5Method,
-			cryptoHexEncodedHash256,
-			customAuthMethod: (_, __, stringToSign) => signRequest(stringToSign, data.keys.secretKey),
-			logging: false,
-		});
+		Array.from(filesToUpload).forEach(async (file) => {
+			if (/[#\$\[\]\*/]/.test(file.name))
+				return toast.error("File name cannot contain special characters (#$[]*/).");
 
-		Array.from(filesToUpload).forEach(async (toUpload) => {
-			const id = nanoid();
-			if (/[#\$\[\]\*/]/.test(toUpload.name)) {
-				toast.error("File name cannot contain special characters (#$[]*/).");
-				return;
-			}
-
-			if (files?.filter((f) => f.name === toUpload.name).length > 0) {
-				toast.error("File with same name already exists.");
-				return;
-			}
+			if (files?.filter((f) => f.name === file.name).length > 0)
+				return toast.error("File with same name already exists.");
 
-			const filePath =
+			const id = nanoid();
+			const Key =
 				currentFolder === ROOT_FOLDER
-					? toUpload.name
-					: `${decodeURIComponent(currentFolder.fullPath)}${toUpload.name}`;
-
-			evaporate.add({
-				name: filePath,
-				file: toUpload,
-				contentType: mime.lookup(toUpload.name) || "application/octet-stream",
-				uploadInitiated: () => {
-					setUploadingFiles((prev) =>
-						prev.concat([
-							{
-								id,
-								name: toUpload.name,
-								key: `${data.keys.Bucket}/${filePath}`,
-								task: evaporate,
-								state: "running",
-								progress: 0,
-								error: false,
-							},
-						])
-					);
-				},
-				progress: (_, stats) => {
-					setUploadingFiles((prevUploadingFiles) =>
-						prevUploadingFiles.map((uploadFile) => {
-							return uploadFile.id === id
-								? {
-									...uploadFile,
-									state: "running",
-									progress: Math.round((stats.totalUploaded / stats.fileSize) * 100),
-								}
-								: uploadFile;
-						})
-					);
-				},
-				paused: () => {
-					setUploadingFiles((prevUploadingFiles) =>
-						prevUploadingFiles.map((uploadFile) => {
-							return uploadFile.id === id ? { ...uploadFile, state: "paused" } : uploadFile;
-						})
-					);
-				},
-				resumed: () => {
-					setUploadingFiles((prevUploadingFiles) =>
-						prevUploadingFiles.map((uploadFile) => {
-							return uploadFile.id === id ? { ...uploadFile, state: "running" } : uploadFile;
-						})
-					);
+					? file.name
+					: `${decodeURIComponent(currentFolder.fullPath)}${file.name}`;
+
+			const upload = new Upload({
+				client: s3Client,
+				params: {
+					Key,
+					Body: file,
+					Bucket: data.keys.Bucket,
+					ContentType: mime.lookup(file.name) || "application/octet-stream",
 				},
-				error: (_) => {
-					setUploadingFiles((prevUploadingFiles) => {
-						return prevUploadingFiles.map((uploadFile) => {
-							if (uploadFile.id === id) return { ...uploadFile, error: true };
-							return uploadFile;
-						});
+				partSize: calculateVariablePartSize(file.size),
+			});
+
+			upload.on("initiated", () => {
+				setUploadingFiles((prev) =>
+					prev.concat([
+						{
+							id,
+							name: file.name,
+							key: Key,
+							task: upload,
+							state: "running",
+							progress: 0,
+							error: false,
+						},
+					])
+				);
+			});
+
+			upload.on("progress", (progress) => {
+				setUploadingFiles((prevUploadingFiles) =>
+					prevUploadingFiles.map((uploadFile) => {
+						return uploadFile.id === id
+							? {
+								...uploadFile,
+								state: "running",
+								progress: Number(
+									parseFloat(((progress.loaded / progress.total) * 100).toString()).toFixed(2)
+								),
+							}
+							: uploadFile;
+					})
+				);
+			});
+
+			upload.on("paused", () => {
+				setUploadingFiles((prevUploadingFiles) =>
+					prevUploadingFiles.map((uploadFile) => {
+						return uploadFile.id === id ? { ...uploadFile, state: "paused" } : uploadFile;
+					})
+				);
+			});
+
+			upload.on("resumed", () => {
+				setUploadingFiles((prevUploadingFiles) =>
+					prevUploadingFiles.map((uploadFile) => {
+						return uploadFile.id === id ? { ...uploadFile, state: "running" } : uploadFile;
+					})
+				);
+			});
+
+			upload.on("error", (err) => {
+				toast.error(err.message);
+				setUploadingFiles((prevUploadingFiles) => {
+					return prevUploadingFiles.map((uploadFile) => {
+						if (uploadFile.id === id) return { ...uploadFile, error: true };
+						return uploadFile;
 					});
-				},
-				complete: async (_xhr, file_key) => {
-					console.log("complete", decodeURIComponent(file_key));
-					setUploadingFiles((prevUploadingFiles) =>
-						prevUploadingFiles.filter((uploadFile) => uploadFile.id !== id)
-					);
-					const newFile: DriveFile = {
-						fullPath: filePath,
-						name: toUpload.name,
-						parent: currentFolder.fullPath,
-						size: toUpload.size.toString(),
-						createdAt: new Date().toISOString(),
-						contentType: mime.lookup(toUpload.name) || "application/octet-stream",
-						bucketName: data.keys.Bucket,
-						bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
-						url: await getSignedUrl(
-							s3Client,
-							new GetObjectCommand({ Bucket: data.keys.Bucket, Key: decodeURIComponent(file_key) }),
-							{ expiresIn: 3600 * 24 }
-						),
-					};
-					setFiles((files) => (files ? [...files, newFile] : [newFile]));
-					toast.success("File uploaded successfully.");
-				},
+				});
+			});
+
+			upload.on("completed", async () => {
+				setUploadingFiles((prevUploadingFiles) =>
+					prevUploadingFiles.filter((uploadFile) => uploadFile.id !== id)
+				);
+				const newFile: DriveFile = {
+					fullPath: Key,
+					name: file.name,
+					parent: currentFolder.fullPath,
+					size: file.size.toString(),
+					createdAt: new Date().toISOString(),
+					contentType: mime.lookup(file.name) || "application/octet-stream",
+					bucketName: data.keys.Bucket,
+					bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
+					url: await getSignedUrl(
+						s3Client,
+						new GetObjectCommand({ Bucket: data.keys.Bucket, Key: Key }),
+						{ expiresIn: 3600 * 24 }
+					),
+				};
+
+				setFiles((files) => (files ? [...files, newFile] : [newFile]));
+				toast.success("File uploaded successfully.");
 			});
+
+			await upload.start();
 		});
 	};
 
@@ -210,7 +230,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 			name: fullPath.split("/").pop(),
 			bucketName: data.keys.Bucket,
 			parent: fullPath.split("/").shift() + "/",
-			bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
+			bucketUrl: data.keys.bucketUrl,
 		});
 	}, [fullPath, user]);
 
@@ -240,7 +260,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 						size: result.Size.toString(),
 						contentType: mime.lookup(result.Key) || "",
 						bucketName: results.Name,
-						bucketUrl: `https://${results.Name}.s3.${data.keys.region}.amazonaws.com`,
+						bucketUrl: data.keys.bucketUrl,
 						url: await getSignedUrl(
 							s3Client,
 							new GetObjectCommand({ Bucket: results.Name, Key: result.Key }),
@@ -269,7 +289,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 						name: results.CommonPrefixes[i].Prefix.slice(0, -1).split("/").pop(),
 						bucketName: results.Name,
 						parent: currentFolder.fullPath,
-						bucketUrl: `https://${results.Name}.s3.${data.keys.region}.amazonaws.com`,
+						bucketUrl: data.keys.bucketUrl,
 					};
 					setFolders((folders) => [...folders, driveFolder]);
 				}
@@ -295,7 +315,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
 						size: result.Size.toString(),
 						contentType: mime.lookup(result.Key) || "",
 						bucketName: results.Name,
-						bucketUrl: `https://${results.Name}.s3.${data.keys.region}.amazonaws.com`,
+						bucketUrl: data.keys.bucketUrl,
 						url: await getSignedUrl(
 							s3Client,
 							new GetObjectCommand({ Bucket: results.Name, Key: result.Key }),