Remove obsolete cache invalidation code from get_sources function (fixes
dryruffian committed Oct 18, 2024
1 parent cd0ee7c commit 6b941d2
Showing 1 changed file with 9 additions and 21 deletions.
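
In short, the change collapses the cache read in get_sources to a single ConnectionError guard: the cache_fetch_failed flag, the ValueError handler for corrupted cache entries, and the block that invalidated the old list-shaped source format are all removed. A minimal sketch of the read path after the change, using only names that appear in the diff below (cache, source_cache_name, logger):

    # Sketch of the simplified cache read, assembled from the new-side lines below.
    try:
        sources = cache.get(key=source_cache_name)
    except ConnectionError:
        # A Redis connection failure is the only error still handled on the read path.
        sources = None
        logger.warning("Redis connect failed, cannot get cached sources.")

    if not sources:
        ...  # fall through to the Elasticsearch aggregation and re-cache the result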
30 changes: 9 additions & 21 deletions api/api/controllers/search_controller.py
@@ -28,7 +28,7 @@
 from api.utils import tallies
 from api.utils.check_dead_links import check_dead_links
 from api.utils.dead_link_mask import get_query_hash
-from api.utils.search_context import SearchContext
+from api.utils.text import SearchContext
 
 
 # Using TYPE_CHECKING to avoid circular imports when importing types
@@ -460,9 +460,9 @@ def query_media(
     )
 
     result_ids = [result.identifier for result in results]
-    search_context = SearchContext.build(result_ids, origin_index)
+    text = SearchContext.build(result_ids, origin_index)
 
-    return results, page_count, result_count, search_context.asdict()
+    return results, page_count, result_count, text.asdict()
 
 
 def tally_results(
@@ -537,25 +537,11 @@ def get_sources(index):
     :return: A dictionary mapping sources to the count of their images.`
     """
     source_cache_name = "sources-" + index
-    cache_fetch_failed = False
     try:
         sources = cache.get(key=source_cache_name)
-    except ValueError:
-        cache_fetch_failed = True
-        sources = None
-        logger.warning("Source cache fetch failed due to corruption")
     except ConnectionError:
-        cache_fetch_failed = True
         sources = None
         logger.warning("Redis connect failed, cannot get cached sources.")
 
-    if isinstance(sources, list) or cache_fetch_failed:
-        sources = None
-        try:
-            # Invalidate old source format.
-            cache.delete(key=source_cache_name)
-        except ConnectionError:
-            logger.warning("Redis connect failed, cannot invalidate cached sources.")
-
     if not sources:
         # Don't increase `size` without reading this issue first:
@@ -570,7 +556,7 @@
                         "size": size,
                         "order": {"_key": "desc"},
                     }
-                }
+                },
             },
         }
         try:
@@ -583,16 +569,18 @@
             buckets = results["aggregations"]["unique_sources"]["buckets"]
         except NotFoundError:
             buckets = [{"key": "none_found", "doc_count": 0}]
-        sources = {result["key"]: result["doc_count"] for result in buckets}
+        sources = {bucket["key"]: bucket["doc_count"] for bucket in buckets}
 
         try:
             cache.set(
-                key=source_cache_name, timeout=SOURCE_CACHE_TIMEOUT, value=sources
+                key=source_cache_name,
+                timeout=SOURCE_CACHE_TIMEOUT,
+                value=sources,
             )
         except ConnectionError:
             logger.warning("Redis connect failed, cannot cache sources.")
 
-    sources = {source: int(doc_count) for source, doc_count in sources.items()}
+    sources = {source: int(count) for source, count in sources.items()}
     return sources


