@@ -62,12 +62,12 @@ def _add_wildcard_to_directories(pattern: str) -> str:
62
62
63
63
def get_hf_home() -> Path:
    """Return the Hugging Face home directory.

    Honors the ``HF_HOME`` environment variable when set; otherwise falls
    back to the conventional ``~/.cache/huggingface`` location.
    """
    default_home = Path.home() / ".cache" / "huggingface"
    return Path(os.environ.get("HF_HOME", default_home))
66
66
67
67
68
68
async def get_hf_token ():
69
69
"""Retrieve the Hugging Face token from the user's HF_HOME directory."""
70
- token_path = get_hf_home () / "token"
70
+ token_path = get_hf_home ()/ "token"
71
71
if await aios .path .exists (token_path ):
72
72
async with aiofiles .open (token_path , 'r' ) as f :
73
73
return (await f .read ()).strip ()
@@ -85,7 +85,7 @@ async def get_auth_headers():
85
85
def get_repo_root(repo_id: str) -> Path:
    """Get the root directory for a given repo ID in the Hugging Face cache.

    Mirrors the hub cache layout: slashes in the repo ID are replaced with
    ``--`` and the result is placed under ``<HF_HOME>/hub/models--<id>``.
    """
    safe_id = repo_id.replace("/", "--")
    return get_hf_home() / "hub" / f"models--{safe_id}"
89
89
90
90
91
91
async def fetch_file_list (session , repo_id , revision , path = "" ):
@@ -181,9 +181,9 @@ async def download_file(
181
181
downloaded_this_session += len (chunk )
182
182
if progress_callback and total_size :
183
183
elapsed_time = (datetime .now () - start_time ).total_seconds ()
184
- speed = int (downloaded_this_session / elapsed_time ) if elapsed_time > 0 else 0
184
+ speed = int (downloaded_this_session / elapsed_time ) if elapsed_time > 0 else 0
185
185
remaining_size = total_size - downloaded_size
186
- eta = timedelta (seconds = remaining_size / speed ) if speed > 0 else timedelta (0 )
186
+ eta = timedelta (seconds = remaining_size / speed ) if speed > 0 else timedelta (0 )
187
187
status = "in_progress" if downloaded_size < total_size else "complete"
188
188
if DEBUG >= 8 : print (f"HF repo file download progress: { file_path = } { elapsed_time = } { speed = } Downloaded={ downloaded_size } /{ total_size } { remaining_size = } { eta = } { status = } " )
189
189
await progress_callback (RepoFileProgressEvent (repo_id , revision , file_path , downloaded_size , downloaded_this_session , total_size , speed , eta , status ))
@@ -199,17 +199,17 @@ async def download_repo_files(
199
199
max_parallel_downloads : int = 4
200
200
) -> Path :
201
201
repo_root = get_repo_root (repo_id )
202
- refs_dir = repo_root / "refs"
203
- snapshots_dir = repo_root / "snapshots"
204
- cachedreqs_dir = repo_root / "cachedreqs"
202
+ refs_dir = repo_root / "refs"
203
+ snapshots_dir = repo_root / "snapshots"
204
+ cachedreqs_dir = repo_root / "cachedreqs"
205
205
206
206
# Ensure directories exist
207
207
await aios .makedirs (refs_dir , exist_ok = True )
208
208
await aios .makedirs (snapshots_dir , exist_ok = True )
209
209
await aios .makedirs (cachedreqs_dir , exist_ok = True )
210
210
211
211
# Check if we have a cached commit hash
212
- refs_file = refs_dir / revision
212
+ refs_file = refs_dir / revision
213
213
if await aios .path .exists (refs_file ):
214
214
async with aiofiles .open (refs_file , 'r' ) as f :
215
215
commit_hash = (await f .read ()).strip ()
@@ -230,13 +230,13 @@ async def download_repo_files(
230
230
await f .write (commit_hash )
231
231
232
232
# Set up the snapshot directory
233
- snapshot_dir = snapshots_dir / commit_hash
233
+ snapshot_dir = snapshots_dir / commit_hash
234
234
await aios .makedirs (snapshot_dir , exist_ok = True )
235
235
236
236
# Set up the cached file list directory
237
- cached_file_list_dir = cachedreqs_dir / commit_hash
237
+ cached_file_list_dir = cachedreqs_dir / commit_hash
238
238
await aios .makedirs (cached_file_list_dir , exist_ok = True )
239
- cached_file_list_path = cached_file_list_dir / "fetch_file_list.json"
239
+ cached_file_list_path = cached_file_list_dir / "fetch_file_list.json"
240
240
241
241
async with aiohttp .ClientSession () as session :
242
242
# Check if we have a cached file list
@@ -261,17 +261,17 @@ async def download_repo_files(
261
261
start_time = datetime .now ()
262
262
263
263
async def download_with_progress (file_info , progress_state ):
264
- local_path = snapshot_dir / file_info ["path" ]
264
+ local_path = snapshot_dir / file_info ["path" ]
265
265
if await aios .path .exists (local_path ) and (await aios .stat (local_path )).st_size == file_info ["size" ]:
266
266
if DEBUG >= 2 : print (f"File already fully downloaded: { file_info ['path' ]} " )
267
267
progress_state ['completed_files' ] += 1
268
268
progress_state ['downloaded_bytes' ] += file_info ["size" ]
269
269
file_progress [file_info ["path" ]] = RepoFileProgressEvent (repo_id , revision , file_info ["path" ], file_info ["size" ], 0 , file_info ["size" ], 0 , timedelta (0 ), "complete" )
270
270
if progress_callback :
271
271
elapsed_time = (datetime .now () - start_time ).total_seconds ()
272
- overall_speed = int (progress_state ['downloaded_bytes_this_session' ] / elapsed_time ) if elapsed_time > 0 else 0
272
+ overall_speed = int (progress_state ['downloaded_bytes_this_session' ]/ elapsed_time ) if elapsed_time > 0 else 0
273
273
remaining_bytes = total_bytes - progress_state ['downloaded_bytes' ]
274
- overall_eta = timedelta (seconds = remaining_bytes / overall_speed ) if overall_speed > 0 else timedelta (seconds = 0 )
274
+ overall_eta = timedelta (seconds = remaining_bytes / overall_speed ) if overall_speed > 0 else timedelta (seconds = 0 )
275
275
status = "in_progress" if progress_state ['completed_files' ] < total_files else "complete"
276
276
await progress_callback (
277
277
RepoProgressEvent (
@@ -287,9 +287,9 @@ async def file_progress_callback(event: RepoFileProgressEvent):
287
287
file_progress [event .file_path ] = event
288
288
if progress_callback :
289
289
elapsed_time = (datetime .now () - start_time ).total_seconds ()
290
- overall_speed = int (progress_state ['downloaded_bytes_this_session' ] / elapsed_time ) if elapsed_time > 0 else 0
290
+ overall_speed = int (progress_state ['downloaded_bytes_this_session' ]/ elapsed_time ) if elapsed_time > 0 else 0
291
291
remaining_bytes = total_bytes - progress_state ['downloaded_bytes' ]
292
- overall_eta = timedelta (seconds = remaining_bytes / overall_speed ) if overall_speed > 0 else timedelta (seconds = 0 )
292
+ overall_eta = timedelta (seconds = remaining_bytes / overall_speed ) if overall_speed > 0 else timedelta (seconds = 0 )
293
293
status = "in_progress" if progress_state ['downloaded_bytes' ] < total_bytes else "complete"
294
294
await progress_callback (
295
295
RepoProgressEvent (
@@ -305,9 +305,9 @@ async def file_progress_callback(event: RepoFileProgressEvent):
305
305
] = RepoFileProgressEvent (repo_id , revision , file_info ["path" ], file_info ["size" ], file_progress [file_info ["path" ]].downloaded_this_session , file_info ["size" ], 0 , timedelta (0 ), "complete" )
306
306
if progress_callback :
307
307
elapsed_time = (datetime .now () - start_time ).total_seconds ()
308
- overall_speed = int (progress_state ['downloaded_bytes_this_session' ] / elapsed_time ) if elapsed_time > 0 else 0
308
+ overall_speed = int (progress_state ['downloaded_bytes_this_session' ]/ elapsed_time ) if elapsed_time > 0 else 0
309
309
remaining_bytes = total_bytes - progress_state ['downloaded_bytes' ]
310
- overall_eta = timedelta (seconds = remaining_bytes / overall_speed ) if overall_speed > 0 else timedelta (seconds = 0 )
310
+ overall_eta = timedelta (seconds = remaining_bytes / overall_speed ) if overall_speed > 0 else timedelta (seconds = 0 )
311
311
status = "in_progress" if progress_state ['completed_files' ] < total_files else "complete"
312
312
await progress_callback (
313
313
RepoProgressEvent (
@@ -347,11 +347,11 @@ async def get_weight_map(repo_id: str, revision: str = "main") -> Optional[Dict[
347
347
348
348
# Check if the file exists
349
349
repo_root = get_repo_root (repo_id )
350
- snapshot_dir = repo_root / "snapshots"
350
+ snapshot_dir = repo_root / "snapshots"
351
351
index_file = next ((f for f in await aios .listdir (snapshot_dir ) if f .endswith ("model.safetensors.index.json" )), None )
352
352
353
353
if index_file :
354
- index_file_path = snapshot_dir / index_file
354
+ index_file_path = snapshot_dir / index_file
355
355
if await aios .path .exists (index_file_path ):
356
356
async with aiofiles .open (index_file_path , 'r' ) as f :
357
357
index_data = json .loads (await f .read ())
0 commit comments