Commit

Merge branch 'master' into ssr-nodestatus
cryptobench authored Oct 10, 2024
2 parents 7e638d7 + 325a9a4 commit 7965356
Showing 2 changed files with 15 additions and 20 deletions.
7 changes: 5 additions & 2 deletions stats-backend/api2/scoring.py
@@ -7,7 +7,10 @@ def calculate_uptime_percentage(node_id, node=None):
     if node is None:
         node = Node.objects.get(node_id=node_id)
     statuses = NodeStatusHistory.objects.filter(node_id=node_id).order_by("timestamp")
-
+    first_online_status = statuses.first()
+
+    if not first_online_status:
+        return 0  # Return 0% if the node has never been online
     online_duration = timedelta(0)
     last_online_time = None

@@ -22,7 +25,7 @@ def calculate_uptime_percentage(node_id, node=None):
     if last_online_time is not None:
         online_duration += timezone.now() - last_online_time

-    total_duration = timezone.now() - node.uptime_created_at
+    total_duration = timezone.now() - first_online_status.timestamp
     uptime_percentage = (
         online_duration.total_seconds() / total_duration.total_seconds()
     ) * 100
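
Note on this change: the first hunk guards against nodes with no recorded status, and the second changes the uptime denominator from the Node row's uptime_created_at to the timestamp of the first recorded status, so numerator and denominator are measured against the same history. The loop body is collapsed in the view above; the following standalone sketch (inferred from the visible fragments, not the repository's verbatim code) illustrates the accumulation the function appears to perform:

from datetime import datetime, timedelta, timezone

def uptime_percentage(statuses, now):
    """Sketch of the uptime math implied by the diff.

    `statuses` stands in for the ordered NodeStatusHistory rows as
    (timestamp, is_online) pairs; the loop body is assumed, since the
    diff collapses it.
    """
    if not statuses:
        return 0  # mirrors the new guard: no history means 0% uptime
    online = timedelta(0)
    last_online = None
    for ts, is_online in statuses:
        if is_online and last_online is None:
            last_online = ts                # an online stretch begins
        elif not is_online and last_online is not None:
            online += ts - last_online      # an online stretch ends
            last_online = None
    if last_online is not None:
        online += now - last_online         # still online right now
    total = now - statuses[0][0]            # new denominator: first recorded status
    return online.total_seconds() / total.total_seconds() * 100

now = datetime(2024, 10, 10, tzinfo=timezone.utc)
history = [
    (now - timedelta(hours=24), True),      # came online 24h ago
    (now - timedelta(hours=6), False),      # went offline 6h ago
]
print(uptime_percentage(history, now))      # 75.0: online 18 of 24 hours
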
28 changes: 10 additions & 18 deletions stats-backend/api2/tasks.py
@@ -1831,8 +1831,8 @@ def extract_wallets_and_ids():
 def bulk_update_node_statuses(nodes_data):
     status_history_to_create = []
     redis_updates = {}
-    nodes_to_update = []
-
+    offline_nodes = []
+    online_nodes = []
     for node_id, is_online in nodes_data:
         latest_status = r.get(f"provider:{node_id}:status")

@@ -1861,31 +1861,22 @@
         )
         redis_updates[f"provider:{node_id}:status"] = str(is_online)

-        if not is_online:
-            nodes_to_update.append(node_id)
+        if is_online:
+            online_nodes.append(node_id)
+        else:
+            offline_nodes.append(node_id)

     if status_history_to_create:
         with transaction.atomic():
             NodeStatusHistory.objects.bulk_create(status_history_to_create)

-    # Efficiently update Node objects for offline nodes
-    Node.objects.filter(node_id__in=nodes_to_update).update(online=False)
-
+    Node.objects.filter(node_id__in=offline_nodes).update(online=False)
+    Node.objects.filter(node_id__in=online_nodes).update(online=True)
     if redis_updates:
         r.mset(redis_updates)

-    # Clean up duplicate consecutive statuses
-    with transaction.atomic():
-        subquery = NodeStatusHistory.objects.filter(
-            node_id=OuterRef('node_id'),
-            timestamp__lt=OuterRef('timestamp')
-        ).order_by('-timestamp')
-
-        duplicate_records = NodeStatusHistory.objects.annotate(
-            prev_status=Subquery(subquery.values('is_online')[:1])
-        ).filter(is_online=F('prev_status'))
-
-        duplicate_records.delete()

 from .utils import check_node_status
 import aiohttp
@@ -1954,11 +1945,12 @@ def initial_relay_nodes_scan():
     listen_for_relay_events.delay()


+
 @app.task
 def check_missing_nodes(missing_nodes):
     nodes_to_update = []
     for node_id in missing_nodes:
         is_online = check_node_status(node_id)
         nodes_to_update.append((node_id, is_online))

-    bulk_update_node_statuses(nodes_to_update)
+    bulk_update_node_statuses.delay(nodes_to_update)
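
Note on the tasks.py change: the hunks replace the single offline-only list with separate online/offline partitions, update both sets of Node rows with two bulk UPDATEs, and drop the trailing cleanup pass that pruned consecutive duplicate NodeStatusHistory rows via OuterRef/Subquery; since history writes are gated on the Redis-cached status, consecutive duplicates should no longer be created in the first place. The middle of the function is collapsed above; read as a whole, the post-change flow is roughly the following (a reconstruction from the visible fragments, assuming the collapsed section only records a history row when the cached status differs; not the repository's verbatim code, and the model import path is assumed):

from django.db import transaction

# Project models as referenced in the diff; import path assumed.
from api2.models import Node, NodeStatusHistory

def apply_status_updates(nodes_data, r):
    status_history_to_create = []
    online_nodes, offline_nodes = [], []
    redis_updates = {}

    for node_id, is_online in nodes_data:
        cached = r.get(f"provider:{node_id}:status")
        # Assumed from the collapsed section: only record a transition when
        # the cached status differs (bytes vs. str comparison depends on the
        # Redis client's decode_responses setting).
        if cached is None or cached != str(is_online):
            status_history_to_create.append(
                NodeStatusHistory(node_id=node_id, is_online=is_online)
            )
            redis_updates[f"provider:{node_id}:status"] = str(is_online)
        if is_online:
            online_nodes.append(node_id)
        else:
            offline_nodes.append(node_id)

    if status_history_to_create:
        with transaction.atomic():
            NodeStatusHistory.objects.bulk_create(status_history_to_create)

    # Two bulk UPDATEs instead of one write per node.
    Node.objects.filter(node_id__in=offline_nodes).update(online=False)
    Node.objects.filter(node_id__in=online_nodes).update(online=True)

    if redis_updates:
        r.mset(redis_updates)
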

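The last hunk also changes how check_missing_nodes hands off its results: calling bulk_update_node_statuses(...) directly runs it synchronously inside the current task, while .delay(...) serializes the arguments and enqueues a separate Celery task for a worker to pick up (which requires bulk_update_node_statuses itself to be registered as a task, presumably decorated just above the visible context). A generic illustration of the difference, with a hypothetical broker URL:

from celery import Celery

# Hypothetical standalone app; the project's real Celery wiring is not shown here.
app = Celery("example", broker="redis://localhost:6379/0")

@app.task
def bulk_update_node_statuses(nodes_data):
    # Stand-in body; the real task performs the batched writes sketched above.
    for node_id, is_online in nodes_data:
        print(node_id, is_online)

if __name__ == "__main__":
    # Direct call: executes immediately in this process, no broker needed.
    bulk_update_node_statuses([("node-a", True)])
    # .delay(): enqueues a message for a worker; needs a reachable broker.
    bulk_update_node_statuses.delay([("node-a", True)])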