@@ -428,21 +428,8 @@ async def _update_artist_metadata(self, artist: Artist, force_refresh: bool = Fa
         local_provs = get_global_cache_value("non_streaming_providers")
         if TYPE_CHECKING:
             local_provs = cast(set[str], local_provs)
-        for prov_mapping in artist.provider_mappings:
-            if prov_mapping.provider_instance not in local_provs:
-                continue
-            if (prov := self.mass.get_provider(prov_mapping.provider_instance)) is None:
-                continue
-            if prov.lookup_key in unique_keys:
-                continue
-            unique_keys.add(prov.lookup_key)
-            with suppress(MediaNotFoundError):
-                prov_item = await self.mass.music.artists.get_provider_item(
-                    prov_mapping.item_id, prov_mapping.provider_instance
-                )
-                artist.metadata.update(prov_item.metadata)
 
-        # collect metadata from all (online) music/ metadata providers
+        # collect metadata from all (online) music + metadata providers
         # NOTE: we only allow this every REFRESH_INTERVAL and a max amount of calls per day
         # to not overload the music/metadata providers with api calls
         # TODO: Utilize a global (cloud) cache for metadata lookups to save on API calls
@@ -459,12 +446,14 @@ async def _update_artist_metadata(self, artist: Artist, force_refresh: bool = Fa
         await self.mass.music.artists.match_providers(artist)
 
         # collect metadata from all (streaming) music providers
+        # NOTE: local providers have already pushed their metadata in the sync
         for prov_mapping in artist.provider_mappings:
             if (prov := self.mass.get_provider(prov_mapping.provider_instance)) is None:
                 continue
             if prov.lookup_key in unique_keys:
                 continue
-            unique_keys.add(prov.lookup_key)
+            if prov.lookup_key not in local_provs:
+                unique_keys.add(prov.lookup_key)
             with suppress(MediaNotFoundError):
                 prov_item = await self.mass.music.artists.get_provider_item(
                     prov_mapping.item_id, prov_mapping.provider_instance
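The artist hunks above hinge on the provider `lookup_key` dedup: providers sharing a lookup key are fetched only once, and with this change a non-streaming (local) provider no longer claims a key, in line with the NOTE that local providers already pushed their metadata during sync. A minimal standalone sketch of that selection pattern, using simplified stand-in types rather than the real Music Assistant provider objects:

```python
# Standalone sketch (simplified stand-in types, not the real Music Assistant objects)
# of the lookup_key dedup used in the loop above.
from dataclasses import dataclass


@dataclass
class Prov:
    instance_id: str
    lookup_key: str  # providers sharing credentials/domain share a lookup_key


def providers_to_fetch(mappings: list[Prov], local_provs: set[str]) -> list[str]:
    unique_keys: set[str] = set()
    selected: list[str] = []
    for prov in mappings:
        if prov.lookup_key in unique_keys:
            continue  # an equivalent provider was already selected
        # local (non-streaming) providers never claim a key in the new code
        if prov.lookup_key not in local_provs:
            unique_keys.add(prov.lookup_key)
        selected.append(prov.instance_id)
    return selected


# two Spotify instances (same lookup_key) collapse to one fetch,
# while the local filesystem provider never blocks them:
print(
    providers_to_fetch(
        [
            Prov("filesystem--abc", "filesystem--abc"),
            Prov("spotify--1", "spotify"),
            Prov("spotify--2", "spotify"),
        ],
        local_provs={"filesystem--abc"},
    )
)  # ['filesystem--abc', 'spotify--1']
```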
@@ -495,26 +484,12 @@ async def _update_artist_metadata(self, artist: Artist, force_refresh: bool = Fa
     async def _update_album_metadata(self, album: Album, force_refresh: bool = False) -> None:
         """Get/update rich metadata for an album."""
         self.logger.debug("Updating metadata for Album %s", album.name)
-        unique_keys: set[str] = set()
         # collect (local) metadata from all local music providers
         local_provs = get_global_cache_value("non_streaming_providers")
         if TYPE_CHECKING:
             local_provs = cast(set[str], local_provs)
-        for prov_mapping in album.provider_mappings:
-            if prov_mapping.provider_instance not in local_provs:
-                continue
-            if (prov := self.mass.get_provider(prov_mapping.provider_instance)) is None:
-                continue
-            if prov.lookup_key in unique_keys:
-                continue
-            unique_keys.add(prov.lookup_key)
-            with suppress(MediaNotFoundError):
-                prov_item = await self.mass.music.albums.get_provider_item(
-                    prov_mapping.item_id, prov_mapping.provider_instance
-                )
-                album.metadata.update(prov_item.metadata)
 
-        # collect metadata from all (online) music/ metadata providers
+        # collect metadata from all (online) music + metadata providers
         # NOTE: we only allow this every REFRESH_INTERVAL and a max amount of calls per day
         # to not overload the (free) metadata providers with api calls
         # TODO: Utilize a global (cloud) cache for metadata lookups to save on API calls
@@ -531,12 +506,15 @@ async def _update_album_metadata(self, album: Album, force_refresh: bool = False
         await self.mass.music.albums.match_providers(album)
 
         # collect metadata from all (streaming) music providers
+        # NOTE: local providers have already pushed their metadata in the sync
+        unique_keys: set[str] = set()
         for prov_mapping in album.provider_mappings:
             if (prov := self.mass.get_provider(prov_mapping.provider_instance)) is None:
                 continue
             if prov.lookup_key in unique_keys:
                 continue
-            unique_keys.add(prov.lookup_key)
+            if prov.lookup_key not in local_provs:
+                unique_keys.add(prov.lookup_key)
             with suppress(MediaNotFoundError):
                 prov_item = await self.mass.music.albums.get_provider_item(
                     prov_mapping.item_id, prov_mapping.provider_instance
@@ -600,7 +578,9 @@ async def _update_track_metadata(self, track: Track, force_refresh: bool = False
                 track.metadata.update(prov_item.metadata)
 
         # collect metadata from all metadata providers
-        if self.config.get_value(CONF_ENABLE_ONLINE_METADATA):
+        # there is only little metadata available for tracks so we only fetch metadata
+        # from other sources if the force flag is set
+        if force_refresh and self.config.get_value(CONF_ENABLE_ONLINE_METADATA):
             for provider in self.providers:
                 if ProviderFeature.TRACK_METADATA not in provider.supported_features:
                     continue
@@ -756,39 +736,44 @@ async def _metadata_scanner(self) -> None:
         self.logger.info("Starting metadata scanner")
         self._online_slots_available = MAX_ONLINE_CALLS_PER_RUN
         timestamp = int(time() - 60 * 60 * 24 * 30)
+        # ARTISTS metadata refresh
         query = (
             f"json_extract({DB_TABLE_ARTISTS}.metadata,'$.last_refresh') ISNULL "
             f"OR json_extract({DB_TABLE_ARTISTS}.metadata,'$.last_refresh') < {timestamp} "
         )
         for artist in await self.mass.music.artists.library_items(
-            limit=2500, order_by="random", extra_query=query
+            limit=50, order_by="random", extra_query=query
         ):
             await self._update_artist_metadata(artist)
 
+        # ALBUMS metadata refresh
         query = (
             f"json_extract({DB_TABLE_ALBUMS}.metadata,'$.last_refresh') ISNULL "
             f"OR json_extract({DB_TABLE_ALBUMS}.metadata,'$.last_refresh') < {timestamp} "
         )
         for album in await self.mass.music.albums.library_items(
-            limit=2500, order_by="random", extra_query=query
+            limit=50, order_by="random", extra_query=query
         ):
             await self._update_album_metadata(album)
 
+        # PLAYLISTS metadata refresh
         query = (
             f"json_extract({DB_TABLE_PLAYLISTS}.metadata,'$.last_refresh') ISNULL "
             f"OR json_extract({DB_TABLE_PLAYLISTS}.metadata,'$.last_refresh') < {timestamp} "
         )
         for playlist in await self.mass.music.playlists.library_items(
-            limit=2500, order_by="random", extra_query=query
+            limit=50, order_by="random", extra_query=query
         ):
             await self._update_playlist_metadata(playlist)
 
+        # TRACKS metadata refresh
+        timestamp = int(time() - 60 * 60 * 24 * 30)
         query = (
             f"json_extract({DB_TABLE_TRACKS}.metadata,'$.last_refresh') ISNULL "
             f"OR json_extract({DB_TABLE_TRACKS}.metadata,'$.last_refresh') < {timestamp} "
         )
         for track in await self.mass.music.tracks.library_items(
-            limit=2500, order_by="random", extra_query=query
+            limit=50, order_by="random", extra_query=query
         ):
             await self._update_track_metadata(track)
         self.logger.info("Metadata scanner finished.")
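
For reference, the scanner hunk selects, per run, up to 50 random library items whose `metadata.last_refresh` is missing or older than roughly 30 days (the limit was previously 2500). A rough, self-contained sqlite3 sketch of that selection; the table layout (`item_id` plus a JSON `metadata` column) is a simplified assumption, not the actual Music Assistant schema:

```python
# Standalone sketch (assumed simplified schema) of the "needs metadata refresh" query:
# pick up to `limit` random rows whose metadata JSON has no last_refresh, or one older
# than ~30 days, mirroring the json_extract filter and limit=50 used in the scanner.
import sqlite3
from time import time


def items_needing_refresh(conn: sqlite3.Connection, table: str, limit: int = 50) -> list[int]:
    cutoff = int(time() - 60 * 60 * 24 * 30)  # same 30-day window as the diff
    query = (
        f"SELECT item_id FROM {table} "
        f"WHERE json_extract({table}.metadata,'$.last_refresh') ISNULL "
        f"OR json_extract({table}.metadata,'$.last_refresh') < {cutoff} "
        f"ORDER BY RANDOM() LIMIT {limit}"
    )
    return [row[0] for row in conn.execute(query)]
```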