From f62113b30e2b94b1dbae358ced6d913892889ac3 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 5 Mar 2024 15:41:47 +0330 Subject: [PATCH 01/48] feat: Added actions matrix creation! note: We still need to update the assess_egagement and it is the core_analyzer library. --- .../analysis/analytics_interactions_script.py | 2 +- .../compute_interaction_matrix_discord.py | 71 ++++- .../analysis/compute_member_activity.py | 11 +- .../utils/compute_interaction_mtx_utils.py | 21 +- .../test_process_non_reaction_heatmaps.py | 246 ++++++++++++++++++ 5 files changed, 327 insertions(+), 24 deletions(-) create mode 100644 tests/unit/test_process_non_reaction_heatmaps.py diff --git a/discord_analyzer/analysis/analytics_interactions_script.py b/discord_analyzer/analysis/analytics_interactions_script.py index 837b870..a5ca076 100644 --- a/discord_analyzer/analysis/analytics_interactions_script.py +++ b/discord_analyzer/analysis/analytics_interactions_script.py @@ -67,7 +67,7 @@ def per_account_interactions( # flatten the list samples_flattened = list(itertools.chain(*samples)) - for i, sample in enumerate(samples_flattened): + for _, sample in enumerate(samples_flattened): account_name = sample[0]["account"] interaction_count = sample[0]["count"] diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index b2337b5..da0b08d 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -4,8 +4,9 @@ # compute_interaction_matrix_discord.py # # Author Ene SS Rawa / Tjitse van der Molen - -from discord_analyzer.analysis.utils.activity import Activity +from typing import Any +import copy +from tc_core_analyzer_lib.utils.activity import DiscordActivity from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.DB_operations.mongodb_query import MongodbQuery from numpy import ndarray @@ -21,7 +22,13 @@ def compute_interaction_matrix_discord( dates: list[str], channels: list[str], db_access: DB_access, - activities: list[str] = [Activity.Mention, Activity.Reply, Activity.Reaction], + activities: list[str] = [ + DiscordActivity.Mention, + DiscordActivity.Reply, + DiscordActivity.Reaction, + DiscordActivity.Lone_msg, + DiscordActivity.Thread_msg, + ], ) -> dict[str, ndarray]: """ Computes interaction matrix from discord data @@ -34,7 +41,7 @@ def compute_interaction_matrix_discord( db_access - obj : database access object activities - list[Activity] : the list of activities to generate the matrix for - default is to include all 3 `Activity` types + default is to include all activity types minimum length is 1 Output: @@ -45,8 +52,7 @@ def compute_interaction_matrix_discord( """ feature_projection = { - "thr_messages": 0, - "lone_messages": 0, + "channelId": 0, "replier": 0, "replied": 0, "mentioner": 0, @@ -77,15 +83,66 @@ def compute_interaction_matrix_discord( db_results = list(cursor) per_acc_query_result = prepare_per_account(db_results=db_results) + per_acc_interaction = process_non_reactions(per_acc_query_result) # And now compute the interactions per account_name (`acc`) int_mat = {} # computing `int_mat` per activity for activity in activities: int_mat[activity] = generate_interaction_matrix( - per_acc_interactions=per_acc_query_result, + per_acc_interactions=per_acc_interaction, acc_names=acc_names, activities=[activity], ) return int_mat + + +def process_non_reactions( + heatmaps_data_per_acc: 
dict[str, list[dict[str, Any]]], + skip_fields: list[str] = [ + "reacted_per_acc", + "mentioner_per_acc", + "replied_per_acc", + "account_name", + "date", + ], +) -> dict[str, list[dict[str, Any]]]: + """ + process the non-interactions heatmap data to be like interaction + we will make it self interactions + + Parameters + ----------- + heatmaps_data_per_acc : dict[str, list[dict[str, Any]]] + heatmaps data per account + the keys are accounts + and the values are the list of heatmaps documents related to them + skip_fields : list[str] + the part of heatmaps document that we don't need to make them like interaction + can be interactions itself and account_name, and date + + Returns + -------- + heatmaps_interactions_per_acc : dict[str, list[dict[str, Any]]] + the same as before but we have changed the non interaction ones to self interaction + """ + heatmaps_interactions_per_acc = copy.deepcopy(heatmaps_data_per_acc) + + for account in heatmaps_interactions_per_acc.keys(): + # for each heatmaps document + for document in heatmaps_interactions_per_acc[account]: + activities = document.keys() + actions = set(activities) - set(skip_fields) + + for action in actions: + action_count = sum(document[action]) + if action_count: + document[action] = [ + [{"account": account, "count": sum(document[action])}] + ] + else: + # action count was zero + document[action] = [] + + return heatmaps_interactions_per_acc diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index b1a0dc4..d214c58 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -214,12 +214,13 @@ def compute_member_activity( last_start = time_diff - relativedelta(days=window_param["period_size"] - 1) # # # ACTUAL ANALYSIS # # # - assess_engagment = EngagementAssessment( activities=[ DiscordActivity.Mention, DiscordActivity.Reply, DiscordActivity.Reaction, + DiscordActivity.Lone_msg, + DiscordActivity.Mention, ], activities_ignore_0_axis=[DiscordActivity.Mention], activities_ignore_1_axis=[], @@ -288,10 +289,10 @@ def compute_member_activity( acc_names, date_list_w_str, channels, db_access ) - # for each int_mat type - for key in list(int_mat.keys()): - # remove interactions with self - int_mat[key][np.diag_indices_from(int_mat[key])] = 0 + # # for each int_mat type + # for key in list(int_mat.keys()): + # # remove interactions with self + # int_mat[key][np.diag_indices_from(int_mat[key])] = 0 # assess engagement (graph_out, *activity_dict) = assess_engagment.compute( diff --git a/discord_analyzer/analysis/utils/compute_interaction_mtx_utils.py b/discord_analyzer/analysis/utils/compute_interaction_mtx_utils.py index f3d8636..d77ef10 100644 --- a/discord_analyzer/analysis/utils/compute_interaction_mtx_utils.py +++ b/discord_analyzer/analysis/utils/compute_interaction_mtx_utils.py @@ -5,7 +5,7 @@ from discord_analyzer.analysis.analytics_interactions_script import ( per_account_interactions, ) -from discord_analyzer.analysis.utils.activity import Activity +from tc_core_analyzer_lib.utils.activity import DiscordActivity def prepare_per_account(db_results: list) -> dict[str, list[dict]]: @@ -29,13 +29,9 @@ def prepare_per_account(db_results: list) -> dict[str, list[dict]]: # a dictionary for results of each account for db_record in db_results: - # if the data for a specific account was not created before, create one as list acc_name = db_record["account_name"] - if acc_name not in 
per_acc_query_result.keys(): - per_acc_query_result[acc_name] = [db_record] - # else, append - else: - per_acc_query_result[acc_name].append(db_record) + per_acc_query_result.setdefault(acc_name, []) + per_acc_query_result[acc_name].append(db_record) return per_acc_query_result @@ -66,7 +62,6 @@ def generate_interaction_matrix( an array of integer values each row and column are representative of account interactions """ - int_matrix = np.zeros((len(acc_names), len(acc_names)), dtype=np.uint16) for acc in per_acc_interactions.keys(): @@ -117,12 +112,16 @@ def prepare_interaction_field_names(activities: list[str]) -> list[str]: """ field_names = [] for activity in activities: - if activity == Activity.Mention: + if activity == DiscordActivity.Mention: field_names.append("mentioner_per_acc") - elif activity == Activity.Reply: + elif activity == DiscordActivity.Reply: field_names.append("replied_per_acc") - elif activity == Activity.Reaction: + elif activity == DiscordActivity.Reaction: field_names.append("reacted_per_acc") + elif activity == DiscordActivity.Thread_msg: + field_names.append("thr_messages") + elif activity == DiscordActivity.Lone_msg: + field_names.append("lone_messages") else: logging.warning("prepare_interaction_field_names: Wrong activity given!") diff --git a/tests/unit/test_process_non_reaction_heatmaps.py b/tests/unit/test_process_non_reaction_heatmaps.py new file mode 100644 index 0000000..853d62b --- /dev/null +++ b/tests/unit/test_process_non_reaction_heatmaps.py @@ -0,0 +1,246 @@ +from unittest import TestCase + +from discord_analyzer.analysis.compute_interaction_matrix_discord import ( + process_non_reactions, +) +import numpy as np + + +class TestProcessNonReactions(TestCase): + def test_empty_inputs(self): + intput_data = {} + results = process_non_reactions(heatmaps_data_per_acc=intput_data) + self.assertEqual(results, {}) + + def test_single_account_no_action(self): + # 24 hours + zeros_vector = np.zeros(24) + input_data = { + "acc1": [ + { + "lone_messages": zeros_vector, + "thr_messages": zeros_vector, + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ] + } + results = process_non_reactions(input_data) + + expected_results = { + "acc1": [ + { + "lone_messages": [], + "thr_messages": [], + # others same as before + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ] + } + self.assertEqual(results, expected_results) + + def test_single_account_with_action(self): + lone_messages = np.zeros(24) + # 3 channel messages at hour 6 + lone_messages[5] = 3 + + thr_messages = np.zeros(24) + thr_messages[1] = 1 + + input_data = { + "acc1": [ + { + "lone_messages": lone_messages, + "thr_messages": thr_messages, + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ] + } + results = process_non_reactions(input_data) + expected_results = { + "acc1": [ + { + "lone_messages": [[{"account": "acc1", "count": 3}]], + "thr_messages": [[{"account": "acc1", "count": 1}]], + # others same as before + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ] + } + self.assertEqual(results, expected_results) + + def test_multiple_account_with_action(self): + user1_lone_messages = 
np.zeros(24) + # 3 channel messages from hour 6 to 7 + user1_lone_messages[5] = 3 + + user1_thr_messages = np.zeros(24) + user1_thr_messages[1] = 1 + + user2_thr_messages = np.zeros(24) + user2_thr_messages[7] = 5 + user2_thr_messages[20] = 2 + + input_data = { + "acc1": [ + { + "lone_messages": user1_lone_messages, + "thr_messages": user1_thr_messages, + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": {}, + "date": "2024-01-01", + } + ], + "acc2": [ + { + "lone_messages": np.zeros(24), + "thr_messages": user2_thr_messages, + "reacted_per_acc": [ + [{"account": "acc5", "count": 3}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ], + } + results = process_non_reactions(input_data) + + expected_results = { + "acc1": [ + { + "lone_messages": [[{"account": "acc1", "count": 3}]], + "thr_messages": [[{"account": "acc1", "count": 1}]], + # others same as before + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": {}, + "date": "2024-01-01", + } + ], + "acc2": [ + { + "lone_messages": [], + "thr_messages": [[{"account": "acc2", "count": 7}]], + # others same as before + "reacted_per_acc": [ + [{"account": "acc5", "count": 3}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ], + } + self.assertEqual(results, expected_results) + + def test_multiple_account_multiple_documents_with_action(self): + user1_lone_messages = np.zeros(24) + # 3 channel messages from hour 6 to 7 + user1_lone_messages[5] = 3 + + user1_thr_messages = np.zeros(24) + user1_thr_messages[1] = 1 + + user2_thr_messages = np.zeros(24) + user2_thr_messages[7] = 5 + user2_thr_messages[20] = 2 + + input_data = { + "acc1": [ + { + "lone_messages": user1_lone_messages, + "thr_messages": user1_thr_messages, + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": {}, + "date": "2024-01-01", + }, + { + "lone_messages": np.zeros(24), + "thr_messages": user1_lone_messages, + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": {}, + "date": "2024-01-02", + }, + ], + "acc2": [ + { + "lone_messages": np.zeros(24), + "thr_messages": user2_thr_messages, + "reacted_per_acc": [ + [{"account": "acc5", "count": 3}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ], + } + results = process_non_reactions(input_data) + + expected_results = { + "acc1": [ + { + "lone_messages": [[{"account": "acc1", "count": 3}]], + "thr_messages": [[{"account": "acc1", "count": 1}]], + # others same as before + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": {}, + "date": "2024-01-01", + }, + { + "lone_messages": [], + "thr_messages": [[{"account": "acc1", "count": 3}]], + # others same as before + "reacted_per_acc": [ + [{"account": "acc2", "count": 1}], + [{"account": "acc3", "count": 5}], + ], + "replied_per_acc": {}, + "date": "2024-01-02", + }, + ], + "acc2": [ + { + "lone_messages": [], + "thr_messages": [[{"account": "acc2", "count": 7}]], + # others same as before + "reacted_per_acc": [ + [{"account": "acc5", "count": 3}], + ], + "replied_per_acc": [], + "date": "2024-01-01", + } + ], + } + self.assertEqual(results, expected_results) From 60f668c09e72cc6bdd480dad68e83669456298b7 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 5 Mar 2024 16:01:07 +0330 Subject: [PATCH 02/48] 
feat: zeroing self interactions! --- .../analysis/compute_interaction_matrix_discord.py | 11 +++++++++-- discord_analyzer/analysis/compute_member_activity.py | 5 ----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index da0b08d..fc911bb 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -6,10 +6,10 @@ # Author Ene SS Rawa / Tjitse van der Molen from typing import Any import copy -from tc_core_analyzer_lib.utils.activity import DiscordActivity from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.DB_operations.mongodb_query import MongodbQuery -from numpy import ndarray +from tc_core_analyzer_lib.utils.activity import DiscordActivity +from numpy import ndarray, diag_indices_from from .utils.compute_interaction_mtx_utils import ( generate_interaction_matrix, @@ -94,6 +94,13 @@ def compute_interaction_matrix_discord( acc_names=acc_names, activities=[activity], ) + # a person interacting to themselves is not counted as activity + if activity in [ + DiscordActivity.Reply, + DiscordActivity.Reaction, + DiscordActivity.Mention, + ]: + int_mat[activity][diag_indices_from(int_mat[activity])] = 0 return int_mat diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index d214c58..7498e92 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -289,11 +289,6 @@ def compute_member_activity( acc_names, date_list_w_str, channels, db_access ) - # # for each int_mat type - # for key in list(int_mat.keys()): - # # remove interactions with self - # int_mat[key][np.diag_indices_from(int_mat[key])] = 0 - # assess engagement (graph_out, *activity_dict) = assess_engagment.compute( int_mat=int_mat, From 637fe3e850a6542204f769b8427416d48095fb26 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 5 Mar 2024 16:11:45 +0330 Subject: [PATCH 03/48] fix: isort linter issue! 
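For reference, the diagonal zeroing introduced in the previous commit can be sketched with plain
numpy; the matrix values below are made up and are not part of this patch:

    import numpy as np
    from numpy import diag_indices_from

    # toy 3x3 reply matrix: entry [i, j] = replies from account i to account j
    reply_matrix = np.array(
        [
            [2, 1, 0],
            [0, 3, 4],
            [1, 0, 5],
        ]
    )

    # zero the diagonal so self-replies are not counted as interaction activity
    reply_matrix[diag_indices_from(reply_matrix)] = 0
    print(reply_matrix)
    # [[0 1 0]
    #  [0 0 4]
    #  [1 0 0]]
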
--- discord_analyzer/analysis/compute_interaction_matrix_discord.py | 2 +- tests/unit/test_process_non_reaction_heatmaps.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index fc911bb..93dcd6a 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -4,8 +4,8 @@ # compute_interaction_matrix_discord.py # # Author Ene SS Rawa / Tjitse van der Molen -from typing import Any import copy +from typing import Any from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.DB_operations.mongodb_query import MongodbQuery from tc_core_analyzer_lib.utils.activity import DiscordActivity diff --git a/tests/unit/test_process_non_reaction_heatmaps.py b/tests/unit/test_process_non_reaction_heatmaps.py index 853d62b..6532fca 100644 --- a/tests/unit/test_process_non_reaction_heatmaps.py +++ b/tests/unit/test_process_non_reaction_heatmaps.py @@ -1,9 +1,9 @@ from unittest import TestCase +import numpy as np from discord_analyzer.analysis.compute_interaction_matrix_discord import ( process_non_reactions, ) -import numpy as np class TestProcessNonReactions(TestCase): From 229c9b511e306aa4d1c02a7bf3c0031a8a992fcd Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 5 Mar 2024 16:53:38 +0330 Subject: [PATCH 04/48] fix: linter issue again! --- .../analysis/compute_interaction_matrix_discord.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index 93dcd6a..43b305d 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -6,10 +6,11 @@ # Author Ene SS Rawa / Tjitse van der Molen import copy from typing import Any + from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.DB_operations.mongodb_query import MongodbQuery -from tc_core_analyzer_lib.utils.activity import DiscordActivity from numpy import ndarray, diag_indices_from +from tc_core_analyzer_lib.utils.activity import DiscordActivity from .utils.compute_interaction_mtx_utils import ( generate_interaction_matrix, From 5586800c7a3ccc412bc6a66738d030376a7d9f68 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 5 Mar 2024 17:02:26 +0330 Subject: [PATCH 05/48] fix: linter issue again! 
--- discord_analyzer/analysis/compute_interaction_matrix_discord.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index 43b305d..cba5cf6 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -9,7 +9,7 @@ from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.DB_operations.mongodb_query import MongodbQuery -from numpy import ndarray, diag_indices_from +from numpy import diag_indices_from, ndarray from tc_core_analyzer_lib.utils.activity import DiscordActivity from .utils.compute_interaction_mtx_utils import ( From 5d2c1d926fe90ee0a036b6d264c87e081a21722b Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 07:38:37 +0330 Subject: [PATCH 06/48] feat: added ignore received interactions! ignore the users that received interaction to be active. --- discord_analyzer/analysis/compute_member_activity.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index 7498e92..f8adde5 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -222,7 +222,11 @@ def compute_member_activity( DiscordActivity.Lone_msg, DiscordActivity.Mention, ], - activities_ignore_0_axis=[DiscordActivity.Mention], + activities_ignore_0_axis=[ + DiscordActivity.Mention, + DiscordActivity.Reaction, + DiscordActivity.Reply, + ], activities_ignore_1_axis=[], ) From 65cf2ecbf9717ce9cebbea9beb592f86cdc6df0b Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 08:23:47 +0330 Subject: [PATCH 07/48] feat: Added user action test cases! 
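The new tests assert that members who only send channel or thread messages are still counted as
active. They rely on the actions-as-self-interaction representation added earlier in this series;
a minimal standalone sketch of that representation (made-up numbers, mirroring the
process_non_reactions unit tests):

    import numpy as np

    # hourly "lone_messages" counts for one account over one day
    hourly_lone_messages = np.zeros(24)
    hourly_lone_messages[5] = 3  # three channel messages in a single hourly bin

    account = "user1"
    total = int(hourly_lone_messages.sum())

    # actions become "self interactions" so they can flow through the same
    # engagement pipeline as mentions, replies and reactions
    lone_messages_as_interaction = (
        [[{"account": account, "count": total}]] if total else []
    )
    print(lone_messages_as_interaction)
    # [[{'account': 'user1', 'count': 3}]]
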
--- .../analysis/compute_member_activity.py | 2 +- ...est_member_activities_action_all_active.py | 152 ++++++++++++++++++ 2 files changed, 153 insertions(+), 1 deletion(-) create mode 100644 tests/integration/test_member_activities_action_all_active.py diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index f8adde5..79b7714 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -220,7 +220,7 @@ def compute_member_activity( DiscordActivity.Reply, DiscordActivity.Reaction, DiscordActivity.Lone_msg, - DiscordActivity.Mention, + DiscordActivity.Thread_msg, ], activities_ignore_0_axis=[ DiscordActivity.Mention, diff --git a/tests/integration/test_member_activities_action_all_active.py b/tests/integration/test_member_activities_action_all_active.py new file mode 100644 index 0000000..ebdbc16 --- /dev/null +++ b/tests/integration/test_member_activities_action_all_active.py @@ -0,0 +1,152 @@ +from unittest import TestCase +from datetime import datetime, timedelta + +from .utils.analyzer_setup import launch_db_access, setup_analyzer +from .utils.remove_and_setup_guild import setup_db_guild + + +class TestMemberActivitiesActionsAllActive(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + + def test_single_user_action(self): + users_id_list = ["user1"] + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + for i in range(35 * 24): + sample = { + "type": 0, + "author": "user1", + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": None, + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, {"_id": 0, "all_active": 1} + ) + + # memberactivities + computed_analytics = list(cursor) + + for document in computed_analytics: + self.assertEqual(set(document["all_active"]), set(["user1"])) + + def test_lone_msg_action(self): + users_id_list = ["user1", "user2", "user3"] + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + active_users = ["user1", "user2"] + for i in range(35 * 24): + sample = { + "type": 0, + "author": active_users[i % len(active_users)], + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": None, + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + 
rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, {"_id": 0, "all_active": 1} + ) + + # memberactivities + computed_analytics = list(cursor) + + for document in computed_analytics: + self.assertEqual(set(document["all_active"]), set(["user1", "user2"])) + + def test_thr_message_action(self): + users_id_list = ["user1", "user2", "user3", "user4"] + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + active_users = ["user1", "user2"] + for i in range(35 * 24): + sample = { + "type": 0, + "author": active_users[i % len(active_users)], + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": None, + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": f"19191{i % 5}", + "threadName": f"Thread_test_{i % 5}", + "isGeneratedByWebhook": False, + } + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, {"_id": 0, "all_active": 1, "date": 1} + ) + + # memberactivities + computed_analytics = list(cursor) + + for document in computed_analytics: + print(document) + self.assertEqual(set(document["all_active"]), set(["user1", "user2"])) From 86b3e2425e6f07419642c5b53530e16206840d38 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 08:31:30 +0330 Subject: [PATCH 08/48] fix: isort linter! --- tests/integration/test_member_activities_action_all_active.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_member_activities_action_all_active.py b/tests/integration/test_member_activities_action_all_active.py index ebdbc16..55df84b 100644 --- a/tests/integration/test_member_activities_action_all_active.py +++ b/tests/integration/test_member_activities_action_all_active.py @@ -1,5 +1,5 @@ -from unittest import TestCase from datetime import datetime, timedelta +from unittest import TestCase from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.remove_and_setup_guild import setup_db_guild From 13e4a00acb2451d5912ff40fd01347c490a5066a Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 09:54:23 +0330 Subject: [PATCH 09/48] feat: Added test case for other activities than active! 
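The new test passes its engagement thresholds explicitly, so setup_db_guild now takes an optional
action dict through **kwargs and falls back to its previous hard-coded defaults when it is omitted.
The fallback pattern in isolation (setup_sketch and the trimmed threshold dict are illustrative
names and values only):

    DEFAULT_ACTION = {"INT_THR": 1, "UW_DEG_THR": 1}  # trimmed for brevity

    def setup_sketch(**kwargs):
        # same fallback pattern as the updated test utility
        return kwargs.get("action", DEFAULT_ACTION)

    assert setup_sketch() == DEFAULT_ACTION
    assert setup_sketch(action={"INT_THR": 2}) == {"INT_THR": 2}
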
--- ...member_activities_action_all_activities.py | 96 +++++++++++++++++++ .../utils/remove_and_setup_guild.py | 35 ++++--- 2 files changed, 116 insertions(+), 15 deletions(-) create mode 100644 tests/integration/test_member_activities_action_all_activities.py diff --git a/tests/integration/test_member_activities_action_all_activities.py b/tests/integration/test_member_activities_action_all_activities.py new file mode 100644 index 0000000..f19df8a --- /dev/null +++ b/tests/integration/test_member_activities_action_all_activities.py @@ -0,0 +1,96 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access, setup_analyzer +from .utils.remove_and_setup_guild import setup_db_guild + + +class TestMemberActivitiesActionsAllActivities(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + + def test_single_user_action(self): + """ + just actions and no interaction + """ + users_id_list = ["user1"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + for i in range(35 * 24): + sample = { + "type": 0, + "author": "user1", + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": None, + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, + { + "_id": 0, + "all_connected": 1, + "all_vital": 1, + "all_consistent": 1, + "all_new_active": 1, + }, + ) + + # memberactivities + computed_analytics = list(cursor) + + for idx, document in enumerate(computed_analytics): + self.assertEqual(document["all_connected"], []) + self.assertEqual(document["all_vital"], []) + + # first period + if idx < 7: + self.assertEqual(document["all_new_active"], ["user1"]) + else: + self.assertEqual(document["all_new_active"], []) + + if idx < 14: + self.assertEqual(document["all_consistent"], []) + # 3rd period + else: + self.assertEqual(document["all_consistent"], ["user1"]) diff --git a/tests/integration/utils/remove_and_setup_guild.py b/tests/integration/utils/remove_and_setup_guild.py index 24f0a85..885ef6d 100644 --- a/tests/integration/utils/remove_and_setup_guild.py +++ b/tests/integration/utils/remove_and_setup_guild.py @@ -13,6 +13,7 @@ def setup_db_guild( discordId_isbot: list[bool] = [False], dates: Optional[list[datetime]] = None, days_ago_period: int = 30, + **kwargs, ): """ Remove the guild from Core databse and then insert it there @@ -28,21 +29,25 @@ def setup_db_guild( ) db_access.db_mongo_client.drop_database(guildId) - action = { - 
"INT_THR": 1, - "UW_DEG_THR": 1, - "PAUSED_T_THR": 1, - "CON_T_THR": 4, - "CON_O_THR": 3, - "EDGE_STR_THR": 5, - "UW_THR_DEG_THR": 5, - "VITAL_T_THR": 4, - "VITAL_O_THR": 3, - "STILL_T_THR": 2, - "STILL_O_THR": 2, - "DROP_H_THR": 2, - "DROP_I_THR": 1, - } + action = kwargs.get( + "action", + { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + }, + ) + db_access.db_mongo_client["Core"]["platforms"].insert_one( { "_id": ObjectId(platform_id), From 809ead9b087c7f5704dad1ab17cc63250176534a Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 15:04:34 +0330 Subject: [PATCH 10/48] fix: we were ignoring interactions wrong! --- .../compute_interaction_matrix_discord.py | 28 ++++--- .../analysis/compute_member_activity.py | 30 ++++--- .../analyzer/analyzer_heatmaps.py | 10 ++- discord_analyzer/analyzer/heatmaps_utils.py | 36 ++++++++ .../test_memberactivities_mentions.py | 78 ++++++++++++++++++ .../test_memberactivities_reaction.py | 82 +++++++++++++++++++ .../test_memberactivities_reply.py | 78 ++++++++++++++++++ 7 files changed, 316 insertions(+), 26 deletions(-) create mode 100644 tests/integration/test_memberactivities_mentions.py create mode 100644 tests/integration/test_memberactivities_reaction.py create mode 100644 tests/integration/test_memberactivities_reply.py diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index cba5cf6..f3cee51 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -23,13 +23,7 @@ def compute_interaction_matrix_discord( dates: list[str], channels: list[str], db_access: DB_access, - activities: list[str] = [ - DiscordActivity.Mention, - DiscordActivity.Reply, - DiscordActivity.Reaction, - DiscordActivity.Lone_msg, - DiscordActivity.Thread_msg, - ], + **kwargs, ) -> dict[str, ndarray]: """ Computes interaction matrix from discord data @@ -40,10 +34,11 @@ def compute_interaction_matrix_discord( dates - [str] : list of all dates to be considered for analysis channels - [str] : list of all channel ids to be considered for analysis db_access - obj : database access object - activities - list[Activity] : - the list of activities to generate the matrix for - default is to include all activity types - minimum length is 1 + **kwargs : + activities - list[Activity] : + the list of activities to generate the matrix for + default is to include all activity types + minimum length is 1 Output: --------- @@ -51,7 +46,16 @@ def compute_interaction_matrix_discord( keys are representative of an activity and the 2d matrix representing the interactions for the activity """ - + activities = kwargs.get( + "activities", + [ + DiscordActivity.Mention, + DiscordActivity.Reply, + DiscordActivity.Reaction, + DiscordActivity.Lone_msg, + DiscordActivity.Thread_msg, + ], + ) feature_projection = { "channelId": 0, "replier": 0, diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index 79b7714..fad07f2 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -214,20 +214,25 @@ def compute_member_activity( last_start = time_diff - 
relativedelta(days=window_param["period_size"] - 1) # # # ACTUAL ANALYSIS # # # + activities_to_analyze = [ + DiscordActivity.Mention, + DiscordActivity.Reply, + DiscordActivity.Reaction, + DiscordActivity.Lone_msg, + DiscordActivity.Thread_msg, + ] + + # no need to ignore reactions assess_engagment = EngagementAssessment( - activities=[ - DiscordActivity.Mention, - DiscordActivity.Reply, - DiscordActivity.Reaction, - DiscordActivity.Lone_msg, - DiscordActivity.Thread_msg, - ], + activities=activities_to_analyze, activities_ignore_0_axis=[ DiscordActivity.Mention, - DiscordActivity.Reaction, + ], + activities_ignore_1_axis=[ DiscordActivity.Reply, + # TODO: Why commenting reaction doesn't make any problems? + # DiscordActivity.Reaction, ], - activities_ignore_1_axis=[], ) # for every window index @@ -290,8 +295,13 @@ def compute_member_activity( # obtain interaction matrix int_mat = compute_interaction_matrix_discord( - acc_names, date_list_w_str, channels, db_access + acc_names, + date_list_w_str, + channels, + db_access, + activities=activities_to_analyze, ) + print(f"int_mat: {int_mat}") # assess engagement (graph_out, *activity_dict) = assess_engagment.compute( diff --git a/discord_analyzer/analyzer/analyzer_heatmaps.py b/discord_analyzer/analyzer/analyzer_heatmaps.py index d475348..6f16645 100644 --- a/discord_analyzer/analyzer/analyzer_heatmaps.py +++ b/discord_analyzer/analyzer/analyzer_heatmaps.py @@ -6,6 +6,7 @@ from discord_analyzer.analysis.activity_hourly import activity_hourly from discord_analyzer.analyzer.heatmaps_utils import ( get_bot_id, + get_userids, getNumberOfActions, store_counts_dict, ) @@ -119,7 +120,10 @@ def analysis_heatmap(self, guildId: str, from_start: bool = False): continue prepared_list = [] - account_list = [] + account_list = get_userids( + db_mongo_client=self.DB_connections.mongoOps.mongo_db_access.db_mongo_client, + guildId=guildId, + ) for entry in entries: if "replied_user" not in entry: @@ -147,9 +151,7 @@ def analysis_heatmap(self, guildId: str, from_start: bool = False): if entry["user_mentions"] is not None: for account in entry["user_mentions"]: - # for making the line shorter - condition2 = account not in bot_ids - if account not in account_list and condition2: + if account not in account_list and account not in bot_ids: account_list.append(account) activity = activity_hourly(prepared_list, acc_names=account_list) diff --git a/discord_analyzer/analyzer/heatmaps_utils.py b/discord_analyzer/analyzer/heatmaps_utils.py index 79b1b61..7a9c578 100644 --- a/discord_analyzer/analyzer/heatmaps_utils.py +++ b/discord_analyzer/analyzer/heatmaps_utils.py @@ -70,3 +70,39 @@ def get_bot_id( bot_ids = list(map(lambda x: x[id_field_name], bots)) return bot_ids + + +def get_userids( + db_mongo_client: MongoClient, + guildId: str, + collection_name: str = "guildmembers", + id_field_name: str = "discordId", +) -> list[str]: + """ + get user ids that are not bot + + Parameters: + ------------ + db_mongo_client : MongoClient + the access to database + guildId : str + the guildId to connect to + collection_name : str + the collection name to use + default is "guildmembers" + id_field_name : str + the fieldId that the account id is saved + default is "discordId" + + Returns: + --------- + user_ids : list[str] + the list of bot ids + """ + cursor = db_mongo_client[guildId][collection_name].find( + {"isBot": False}, {"_id": 0, id_field_name: 1} + ) + users = list(cursor) + user_ids = list(map(lambda user: user[id_field_name], users)) + + return user_ids diff --git 
a/tests/integration/test_memberactivities_mentions.py b/tests/integration/test_memberactivities_mentions.py new file mode 100644 index 0000000..1b3cb1c --- /dev/null +++ b/tests/integration/test_memberactivities_mentions.py @@ -0,0 +1,78 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access, setup_analyzer +from .utils.remove_and_setup_guild import setup_db_guild + + +class TestMemberActivitiesReply(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + + def test_single_user_interaction(self): + users_id_list = ["user1", "user2"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + for i in range(35 * 24): + sample = { + "type": 0, + "author": "user1", + "content": f"test message {i} @user2", + "user_mentions": ["user2"], + "role_mentions": [], + "reactions": [], + "replied_user": None, + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, + { + "_id": 0, + "all_active": 1, + }, + ) + + # memberactivities + computed_analytics = list(cursor) + + for document in computed_analytics: + # user1 was replying user2 messages + self.assertEqual(document["all_active"], ["user1"]) diff --git a/tests/integration/test_memberactivities_reaction.py b/tests/integration/test_memberactivities_reaction.py new file mode 100644 index 0000000..c57264d --- /dev/null +++ b/tests/integration/test_memberactivities_reaction.py @@ -0,0 +1,82 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access, setup_analyzer +from .utils.remove_and_setup_guild import setup_db_guild + + +class TestMemberActivitiesReactions(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + + def test_single_user_action(self): + """ + just actions and no interaction + """ + users_id_list = ["user1", "user2"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + for i in range(35 * 
24): + sample = { + "type": 0, + "author": "user1", + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": ["user2,👍"], + "replied_user": None, + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, + { + "_id": 0, + "all_active": 1, + }, + ) + + # memberactivities + computed_analytics = list(cursor) + + for document in computed_analytics: + # user1 was sending channel messages (lone_message) + # user2 was doing reactions + self.assertEqual(set(document["all_active"]), set(["user1", "user2"])) diff --git a/tests/integration/test_memberactivities_reply.py b/tests/integration/test_memberactivities_reply.py new file mode 100644 index 0000000..9b5a0e6 --- /dev/null +++ b/tests/integration/test_memberactivities_reply.py @@ -0,0 +1,78 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access, setup_analyzer +from .utils.remove_and_setup_guild import setup_db_guild + + +class TestMemberActivitiesReply(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + + def test_single_user_interaction(self): + users_id_list = ["user1", "user2"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + for i in range(35 * 24): + sample = { + "type": 19, + "author": "user1", + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": "user2", + "createdDate": (datetime.now() - timedelta(hours=i)), + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + analyzer = setup_analyzer() + analyzer.recompute_analytics(self.guildId) + cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( + {}, + { + "_id": 0, + "all_active": 1, + }, + ) + + # memberactivities + computed_analytics = list(cursor) + + for document in computed_analytics: + # user1 was replying user2 messages + self.assertEqual(document["all_active"], ["user1"]) From f32d482fc3d385f9e238c934be36773653789c17 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 15:50:02 +0330 Subject: [PATCH 11/48] update: remove TODO comment! The reactions were for just one user, meaning if someone react to another person's message then just the ractor would get to be active. 
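Concretely, if user2 reacts to user1's message, only the reactor performed an action for that
event; user1 is credited separately for writing the message. A small sketch, with the reaction
string format taken from the new test fixtures rather than from a spec:

    # raw-message fixture: user1 authored the message, user2 reacted with 👍
    message = {
        "author": "user1",
        "reactions": ["user2,👍"],
        "replied_user": None,
    }

    # the reaction is a one-directional action performed by the reactor
    reactors = [entry.split(",")[0] for entry in message["reactions"]]
    assert reactors == ["user2"]  # user2 is active via the reaction;
                                  # user1 is active only via the lone message
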
--- discord_analyzer/analysis/compute_member_activity.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index fad07f2..a5a878e 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -230,8 +230,6 @@ def compute_member_activity( ], activities_ignore_1_axis=[ DiscordActivity.Reply, - # TODO: Why commenting reaction doesn't make any problems? - # DiscordActivity.Reaction, ], ) From 3180fdb3e05bdd3add5c81aea139eb435db1f9e2 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 16:11:39 +0330 Subject: [PATCH 12/48] feat: more modularizing code! --- .../analysis/compute_member_activity.py | 54 +++---------- .../analysis/utils/member_activity_utils.py | 77 ++++++++++++++++++- 2 files changed, 86 insertions(+), 45 deletions(-) diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index a5a878e..b8d7c36 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -11,9 +11,6 @@ import networkx as nx import numpy as np from dateutil.relativedelta import relativedelta -from discord_analyzer.analysis.compute_interaction_matrix_discord import ( - compute_interaction_matrix_discord, -) from discord_analyzer.analysis.member_activity_history import check_past_history from discord_analyzer.analysis.utils.member_activity_history_utils import ( MemberActivityPastUtils, @@ -25,10 +22,9 @@ get_users_past_window, store_based_date, update_activities, + assess_engagement, ) from discord_analyzer.DB_operations.mongodb_access import DB_access -from tc_core_analyzer_lib.assess_engagement import EngagementAssessment -from tc_core_analyzer_lib.utils.activity import DiscordActivity def compute_member_activity( @@ -214,24 +210,6 @@ def compute_member_activity( last_start = time_diff - relativedelta(days=window_param["period_size"] - 1) # # # ACTUAL ANALYSIS # # # - activities_to_analyze = [ - DiscordActivity.Mention, - DiscordActivity.Reply, - DiscordActivity.Reaction, - DiscordActivity.Lone_msg, - DiscordActivity.Thread_msg, - ] - - # no need to ignore reactions - assess_engagment = EngagementAssessment( - activities=activities_to_analyze, - activities_ignore_0_axis=[ - DiscordActivity.Mention, - ], - activities_ignore_1_axis=[ - DiscordActivity.Reply, - ], - ) # for every window index max_range = int(np.floor(last_start.days / window_param["step_size"]) + 1) @@ -291,28 +269,16 @@ def compute_member_activity( # we could have empty outputs acc_names = get_latest_joined_users(db_access, count=5) - # obtain interaction matrix - int_mat = compute_interaction_matrix_discord( - acc_names, - date_list_w_str, - channels, - db_access, - activities=activities_to_analyze, - ) - print(f"int_mat: {int_mat}") - - # assess engagement - (graph_out, *activity_dict) = assess_engagment.compute( - int_mat=int_mat, + graph_out, activity_dict = assess_engagement( w_i=new_window_i, - acc_names=np.asarray(acc_names), - act_param=act_param, - WINDOW_D=window_param["period_size"], - **activity_dict, - ) - - activity_dict = convert_to_dict( - data=list(activity_dict), dict_keys=activities_name + accounts=acc_names, + action_params=act_param, + period_size=window_param["period_size"], + db_access=db_access, + channels=channels, + analyze_dates=date_list_w_str, + activities_name=activities_name, + 
activity_dict=activity_dict, ) # make empty dict for node attributes diff --git a/discord_analyzer/analysis/utils/member_activity_utils.py b/discord_analyzer/analysis/utils/member_activity_utils.py index 97ae499..7390d7a 100644 --- a/discord_analyzer/analysis/utils/member_activity_utils.py +++ b/discord_analyzer/analysis/utils/member_activity_utils.py @@ -2,8 +2,15 @@ from typing import Any import numpy as np +from networkx import DiGraph import pymongo + +from discord_analyzer.analysis.compute_interaction_matrix_discord import ( + compute_interaction_matrix_discord, +) from discord_analyzer.DB_operations.mongodb_access import DB_access +from tc_core_analyzer_lib.assess_engagement import EngagementAssessment +from tc_core_analyzer_lib.utils.activity import DiscordActivity def get_joined_accounts(db_access: DB_access, date_range: tuple[datetime, datetime]): @@ -41,7 +48,7 @@ def store_based_date( analytics_day_range, joined_acc_dict, load_past, - **kwargs + **kwargs, ): """ store the activities (`all_*`) in a dictionary based on their ending analytics date @@ -249,3 +256,71 @@ def get_latest_joined_users(db_access: DB_access, count: int = 5) -> list[str]: usersId = list(map(lambda x: x["discordId"], usersId)) return usersId + + +def assess_engagement( + w_i: int, + accounts: list[str], + action_params: dict[str, int], + period_size: int, + db_access: DB_access, + channels: list[str], + analyze_dates: list[str], + activities_name: list[str], + activity_dict: dict[str, dict], + **kwargs, +) -> tuple[DiGraph, dict[str, dict]]: + """ + assess engagement of a window index for users + + """ + activities_to_analyze = kwargs.get( + "activities_to_analyze", + [ + DiscordActivity.Mention, + DiscordActivity.Reply, + DiscordActivity.Reaction, + DiscordActivity.Lone_msg, + DiscordActivity.Thread_msg, + ], + ) + ignore_axis0 = kwargs.get( + "ignore_axis0", + [ + DiscordActivity.Mention, + ], + ) + # no need to ignore reactions + ignore_axis1 = kwargs.get( + "ignore_axis1", + [ + DiscordActivity.Reply, + ], + ) + + assess_engagment = EngagementAssessment( + activities=activities_to_analyze, + activities_ignore_0_axis=ignore_axis0, + activities_ignore_1_axis=ignore_axis1, + ) + # obtain interaction matrix + int_mat = compute_interaction_matrix_discord( + accounts, + analyze_dates, + channels, + db_access, + activities=activities_to_analyze, + ) + + # assess engagement + (graph_out, *activity_dict) = assess_engagment.compute( + int_mat=int_mat, + w_i=w_i, + acc_names=np.asarray(accounts), + act_param=action_params, + WINDOW_D=period_size, + **activity_dict, + ) + + activity_dict = convert_to_dict(data=list(activity_dict), dict_keys=activities_name) + return graph_out, activity_dict From 2e9296482d35a4770c484bc7115d00b0b76d9f4e Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 16:30:12 +0330 Subject: [PATCH 13/48] fix: linter issues! 
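The engagement assessment now sits behind a single helper in member_activity_utils. A hedged usage
sketch of the extracted function follows; the argument values are placeholders taken from the
compute_member_activity loop, and a configured DB_access plus seeded guild data are assumed:

    from discord_analyzer.analysis.utils.member_activity_utils import (
        assess_engagement,
    )

    # placeholders: these names come from the surrounding compute_member_activity loop
    graph_out, activity_dict = assess_engagement(
        w_i=new_window_i,
        accounts=acc_names,
        action_params=act_param,
        period_size=window_param["period_size"],
        db_access=db_access,
        channels=channels,
        analyze_dates=date_list_w_str,
        activities_name=activities_name,
        activity_dict=activity_dict,
    )
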
--- discord_analyzer/analysis/compute_member_activity.py | 2 +- discord_analyzer/analysis/utils/member_activity_utils.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/discord_analyzer/analysis/compute_member_activity.py b/discord_analyzer/analysis/compute_member_activity.py index b8d7c36..a395aed 100644 --- a/discord_analyzer/analysis/compute_member_activity.py +++ b/discord_analyzer/analysis/compute_member_activity.py @@ -16,13 +16,13 @@ MemberActivityPastUtils, ) from discord_analyzer.analysis.utils.member_activity_utils import ( + assess_engagement, convert_to_dict, get_joined_accounts, get_latest_joined_users, get_users_past_window, store_based_date, update_activities, - assess_engagement, ) from discord_analyzer.DB_operations.mongodb_access import DB_access diff --git a/discord_analyzer/analysis/utils/member_activity_utils.py b/discord_analyzer/analysis/utils/member_activity_utils.py index 7390d7a..8c4213c 100644 --- a/discord_analyzer/analysis/utils/member_activity_utils.py +++ b/discord_analyzer/analysis/utils/member_activity_utils.py @@ -2,13 +2,12 @@ from typing import Any import numpy as np -from networkx import DiGraph import pymongo - from discord_analyzer.analysis.compute_interaction_matrix_discord import ( compute_interaction_matrix_discord, ) from discord_analyzer.DB_operations.mongodb_access import DB_access +from networkx import DiGraph from tc_core_analyzer_lib.assess_engagement import EngagementAssessment from tc_core_analyzer_lib.utils.activity import DiscordActivity From e6d7fcd7b233a69760e3b84f2051339da7e4ac02 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 16:58:02 +0330 Subject: [PATCH 14/48] fix: reactions weren't being ignored! Now we found out and fixed the problems using more test cases. 
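The behavioural change, distilled from the reaction test added below: when only reactions are
analyzed, only the reactor ends up active. The variable names below refer to that test's own
locals, and a configured db_access with the test's seeded data is assumed:

    _, activity_dict = assess_engagement(
        w_i=0,
        accounts=["user1", "user2"],
        action_params=action,
        period_size=7,
        db_access=db_access,
        channels=["1020707129214111827"],
        analyze_dates=list(analyze_dates),
        activities_name=list(activity_dict.keys()),
        activity_dict=activity_dict,
        activities_to_analyze=[DiscordActivity.Reaction],
    )
    # only the reactor is counted as active; the message author is not
    # credited for merely receiving reactions
    assert activity_dict["all_active"] == {"0": {"user2"}}
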
--- .../analysis/utils/member_activity_utils.py | 2 +- .../test_assess_engagement_mention.py | 153 ++++++++++++++++++ .../test_assess_engagement_reactions.py | 153 ++++++++++++++++++ .../test_assess_engagement_replies.py | 153 ++++++++++++++++++ 4 files changed, 460 insertions(+), 1 deletion(-) create mode 100644 tests/integration/test_assess_engagement_mention.py create mode 100644 tests/integration/test_assess_engagement_reactions.py create mode 100644 tests/integration/test_assess_engagement_replies.py diff --git a/discord_analyzer/analysis/utils/member_activity_utils.py b/discord_analyzer/analysis/utils/member_activity_utils.py index 8c4213c..976c865 100644 --- a/discord_analyzer/analysis/utils/member_activity_utils.py +++ b/discord_analyzer/analysis/utils/member_activity_utils.py @@ -289,11 +289,11 @@ def assess_engagement( DiscordActivity.Mention, ], ) - # no need to ignore reactions ignore_axis1 = kwargs.get( "ignore_axis1", [ DiscordActivity.Reply, + DiscordActivity.Reaction, ], ) diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py new file mode 100644 index 0000000..b96d74a --- /dev/null +++ b/tests/integration/test_assess_engagement_mention.py @@ -0,0 +1,153 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access +from .utils.remove_and_setup_guild import setup_db_guild +from discord_analyzer.analysis.utils.member_activity_utils import ( + assess_engagement, +) +from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps +from tc_core_analyzer_lib.utils.activity import DiscordActivity +from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from utils.daolytics_uitls import ( + get_mongo_credentials, + get_neo4j_credentials, +) + + +class TestAssessEngagementMentions(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + self.create_db_connections() + + def create_db_connections(self): + base_analyzer = Base_analyzer() + mongo_creds = get_mongo_credentials() + base_analyzer.set_mongo_database_info( + mongo_db_user=mongo_creds["user"], + mongo_db_password=mongo_creds["password"], + mongo_db_host=mongo_creds["host"], + mongo_db_port=mongo_creds["port"], + ) + neo4j_creds = get_neo4j_credentials() + base_analyzer.set_neo4j_database_info(neo4j_creds) + base_analyzer.database_connect() + self.db_connections = base_analyzer.DB_connections + + def heatmaps_analytics(self): + """ + heatmaps are the input for assess_engagement's interaction matrix + """ + heatmaps = Heatmaps(DB_connections=self.db_connections, testing=False) + heatmaps_data = heatmaps.analysis_heatmap(guildId=self.guildId, from_start=True) + analytics_data = {} + analytics_data[f"{self.guildId}"] = { + "heatmaps": heatmaps_data, + "memberactivities": ( + None, + None, + ), + } + self.db_connections.store_analytics_data( + analytics_data=analytics_data, + community_id="123", + remove_memberactivities=False, + remove_heatmaps=False, + ) + + def test_single_user_mention(self): + """ + just actions and no interaction + """ + users_id_list = ["user1", "user2"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + 
days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + analyze_dates = set() + for i in range(35 * 24): + raw_data_date = datetime.now() - timedelta(hours=i) + sample = { + "type": 0, + "author": "user2", + "content": f"test message {i} @user1", + "user_mentions": ["user1"], + "role_mentions": [], + "reactions": [], + "replied_user": None, + "createdDate": raw_data_date, + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + analyze_dates.add(raw_data_date.strftime("%Y-%m-%d")) + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + self.heatmaps_analytics() + + activity_dict: dict[str, dict] = { + "all_joined": {"0": set()}, + "all_joined_day": {"0": set()}, + "all_consistent": {}, + "all_vital": {}, + "all_active": {}, + "all_connected": {}, + "all_paused": {}, + "all_new_disengaged": {}, + "all_disengaged": {}, + "all_unpaused": {}, + "all_returned": {}, + "all_new_active": {}, + "all_still_active": {}, + "all_dropped": {}, + "all_disengaged_were_newly_active": {}, + "all_disengaged_were_consistently_active": {}, + "all_disengaged_were_vital": {}, + "all_lurker": {}, + "all_about_to_disengage": {}, + "all_disengaged_in_past": {}, + } + _, activity_dict = assess_engagement( + w_i=0, + accounts=users_id_list, + action_params=action, + period_size=7, + db_access=self.db_access, + channels=["1020707129214111827"], + analyze_dates=list(analyze_dates), + activities_name=list(activity_dict.keys()), + activity_dict=activity_dict, + activities_to_analyze=[ + DiscordActivity.Mention, + ], + ) + self.assertEqual(activity_dict["all_active"], {"0": set(["user2"])}) diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py new file mode 100644 index 0000000..59713ce --- /dev/null +++ b/tests/integration/test_assess_engagement_reactions.py @@ -0,0 +1,153 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access +from .utils.remove_and_setup_guild import setup_db_guild +from discord_analyzer.analysis.utils.member_activity_utils import ( + assess_engagement, +) +from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps +from tc_core_analyzer_lib.utils.activity import DiscordActivity +from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from utils.daolytics_uitls import ( + get_mongo_credentials, + get_neo4j_credentials, +) + + +class TestAssessEngagementReactions(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + self.create_db_connections() + + def create_db_connections(self): + base_analyzer = Base_analyzer() + mongo_creds = get_mongo_credentials() + base_analyzer.set_mongo_database_info( + mongo_db_user=mongo_creds["user"], + mongo_db_password=mongo_creds["password"], + mongo_db_host=mongo_creds["host"], + mongo_db_port=mongo_creds["port"], + ) + neo4j_creds = get_neo4j_credentials() + base_analyzer.set_neo4j_database_info(neo4j_creds) + base_analyzer.database_connect() + self.db_connections = base_analyzer.DB_connections + + def heatmaps_analytics(self): + """ + heatmaps are the input for assess_engagement's 
interaction matrix + """ + heatmaps = Heatmaps(DB_connections=self.db_connections, testing=False) + heatmaps_data = heatmaps.analysis_heatmap(guildId=self.guildId, from_start=True) + analytics_data = {} + analytics_data[f"{self.guildId}"] = { + "heatmaps": heatmaps_data, + "memberactivities": ( + None, + None, + ), + } + self.db_connections.store_analytics_data( + analytics_data=analytics_data, + community_id="123", + remove_memberactivities=False, + remove_heatmaps=False, + ) + + def test_single_user_reaction(self): + """ + just actions and no interaction + """ + users_id_list = ["user1", "user2"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + analyze_dates = set() + for i in range(35 * 24): + raw_data_date = datetime.now() - timedelta(hours=i) + sample = { + "type": 0, + "author": "user1", + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": ["user2,👍"], + "replied_user": None, + "createdDate": raw_data_date, + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + analyze_dates.add(raw_data_date.strftime("%Y-%m-%d")) + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + self.heatmaps_analytics() + + activity_dict: dict[str, dict] = { + "all_joined": {"0": set()}, + "all_joined_day": {"0": set()}, + "all_consistent": {}, + "all_vital": {}, + "all_active": {}, + "all_connected": {}, + "all_paused": {}, + "all_new_disengaged": {}, + "all_disengaged": {}, + "all_unpaused": {}, + "all_returned": {}, + "all_new_active": {}, + "all_still_active": {}, + "all_dropped": {}, + "all_disengaged_were_newly_active": {}, + "all_disengaged_were_consistently_active": {}, + "all_disengaged_were_vital": {}, + "all_lurker": {}, + "all_about_to_disengage": {}, + "all_disengaged_in_past": {}, + } + _, activity_dict = assess_engagement( + w_i=0, + accounts=users_id_list, + action_params=action, + period_size=7, + db_access=self.db_access, + channels=["1020707129214111827"], + analyze_dates=list(analyze_dates), + activities_name=list(activity_dict.keys()), + activity_dict=activity_dict, + activities_to_analyze=[ + DiscordActivity.Reaction, + ], + ) + self.assertEqual(activity_dict["all_active"], {"0": set(["user2"])}) diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py new file mode 100644 index 0000000..a91db21 --- /dev/null +++ b/tests/integration/test_assess_engagement_replies.py @@ -0,0 +1,153 @@ +from datetime import datetime, timedelta +from unittest import TestCase + +from .utils.analyzer_setup import launch_db_access +from .utils.remove_and_setup_guild import setup_db_guild +from discord_analyzer.analysis.utils.member_activity_utils import ( + assess_engagement, +) +from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps +from tc_core_analyzer_lib.utils.activity 
import DiscordActivity +from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from utils.daolytics_uitls import ( + get_mongo_credentials, + get_neo4j_credentials, +) + + +class TestAssessEngagementReplies(TestCase): + def setUp(self) -> None: + self.guildId = "1234" + self.db_access = launch_db_access(self.guildId) + self.create_db_connections() + + def create_db_connections(self): + base_analyzer = Base_analyzer() + mongo_creds = get_mongo_credentials() + base_analyzer.set_mongo_database_info( + mongo_db_user=mongo_creds["user"], + mongo_db_password=mongo_creds["password"], + mongo_db_host=mongo_creds["host"], + mongo_db_port=mongo_creds["port"], + ) + neo4j_creds = get_neo4j_credentials() + base_analyzer.set_neo4j_database_info(neo4j_creds) + base_analyzer.database_connect() + self.db_connections = base_analyzer.DB_connections + + def heatmaps_analytics(self): + """ + heatmaps are the input for assess_engagement's interaction matrix + """ + heatmaps = Heatmaps(DB_connections=self.db_connections, testing=False) + heatmaps_data = heatmaps.analysis_heatmap(guildId=self.guildId, from_start=True) + analytics_data = {} + analytics_data[f"{self.guildId}"] = { + "heatmaps": heatmaps_data, + "memberactivities": ( + None, + None, + ), + } + self.db_connections.store_analytics_data( + analytics_data=analytics_data, + community_id="123", + remove_memberactivities=False, + remove_heatmaps=False, + ) + + def test_single_user_reply(self): + """ + just actions and no interaction + """ + users_id_list = ["user1", "user2"] + action = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + setup_db_guild( + self.db_access, + self.guildId, + discordId_list=users_id_list, + days_ago_period=35, + action=action, + ) + self.db_access.db_mongo_client[self.guildId]["heatmaps"].delete_many({}) + self.db_access.db_mongo_client[self.guildId].create_collection("heatmaps") + + rawinfo_samples = [] + analyze_dates = set() + for i in range(35 * 24): + raw_data_date = datetime.now() - timedelta(hours=i) + sample = { + "type": 19, + "author": "user1", + "content": f"test message {i}", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": "user2", + "createdDate": raw_data_date, + "messageId": f"11188143219343360{i}", + "channelId": "1020707129214111827", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + } + analyze_dates.add(raw_data_date.strftime("%Y-%m-%d")) + rawinfo_samples.append(sample) + + self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( + rawinfo_samples + ) + self.heatmaps_analytics() + + activity_dict: dict[str, dict] = { + "all_joined": {"0": set()}, + "all_joined_day": {"0": set()}, + "all_consistent": {}, + "all_vital": {}, + "all_active": {}, + "all_connected": {}, + "all_paused": {}, + "all_new_disengaged": {}, + "all_disengaged": {}, + "all_unpaused": {}, + "all_returned": {}, + "all_new_active": {}, + "all_still_active": {}, + "all_dropped": {}, + "all_disengaged_were_newly_active": {}, + "all_disengaged_were_consistently_active": {}, + "all_disengaged_were_vital": {}, + "all_lurker": {}, + "all_about_to_disengage": {}, + "all_disengaged_in_past": {}, + } + _, activity_dict = assess_engagement( + w_i=0, + accounts=users_id_list, + action_params=action, + period_size=7, + 
db_access=self.db_access, + channels=["1020707129214111827"], + analyze_dates=list(analyze_dates), + activities_name=list(activity_dict.keys()), + activity_dict=activity_dict, + activities_to_analyze=[ + DiscordActivity.Reply, + ], + ) + self.assertEqual(activity_dict["all_active"], {"0": set(["user1"])}) From 00720f3494fe1cae0d04cdab095e0b37b2021196 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 6 Mar 2024 17:11:57 +0330 Subject: [PATCH 15/48] fix: isort linter error! --- .../test_assess_engagement_mention.py | 16 ++++++---------- .../test_assess_engagement_reactions.py | 16 ++++++---------- .../test_assess_engagement_replies.py | 16 ++++++---------- 3 files changed, 18 insertions(+), 30 deletions(-) diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py index b96d74a..f6dfb4d 100644 --- a/tests/integration/test_assess_engagement_mention.py +++ b/tests/integration/test_assess_engagement_mention.py @@ -1,18 +1,14 @@ from datetime import datetime, timedelta from unittest import TestCase -from .utils.analyzer_setup import launch_db_access -from .utils.remove_and_setup_guild import setup_db_guild -from discord_analyzer.analysis.utils.member_activity_utils import ( - assess_engagement, -) +from discord_analyzer.analysis.utils.member_activity_utils import assess_engagement from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps -from tc_core_analyzer_lib.utils.activity import DiscordActivity from discord_analyzer.analyzer.base_analyzer import Base_analyzer -from utils.daolytics_uitls import ( - get_mongo_credentials, - get_neo4j_credentials, -) +from tc_core_analyzer_lib.utils.activity import DiscordActivity +from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials + +from .utils.analyzer_setup import launch_db_access +from .utils.remove_and_setup_guild import setup_db_guild class TestAssessEngagementMentions(TestCase): diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py index 59713ce..569cf48 100644 --- a/tests/integration/test_assess_engagement_reactions.py +++ b/tests/integration/test_assess_engagement_reactions.py @@ -1,18 +1,14 @@ from datetime import datetime, timedelta from unittest import TestCase -from .utils.analyzer_setup import launch_db_access -from .utils.remove_and_setup_guild import setup_db_guild -from discord_analyzer.analysis.utils.member_activity_utils import ( - assess_engagement, -) +from discord_analyzer.analysis.utils.member_activity_utils import assess_engagement from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps -from tc_core_analyzer_lib.utils.activity import DiscordActivity from discord_analyzer.analyzer.base_analyzer import Base_analyzer -from utils.daolytics_uitls import ( - get_mongo_credentials, - get_neo4j_credentials, -) +from tc_core_analyzer_lib.utils.activity import DiscordActivity +from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials + +from .utils.analyzer_setup import launch_db_access +from .utils.remove_and_setup_guild import setup_db_guild class TestAssessEngagementReactions(TestCase): diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py index a91db21..5ae7261 100644 --- a/tests/integration/test_assess_engagement_replies.py +++ b/tests/integration/test_assess_engagement_replies.py @@ -1,18 +1,14 @@ from datetime import datetime, timedelta from unittest import TestCase -from 
.utils.analyzer_setup import launch_db_access -from .utils.remove_and_setup_guild import setup_db_guild -from discord_analyzer.analysis.utils.member_activity_utils import ( - assess_engagement, -) +from discord_analyzer.analysis.utils.member_activity_utils import assess_engagement from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps -from tc_core_analyzer_lib.utils.activity import DiscordActivity from discord_analyzer.analyzer.base_analyzer import Base_analyzer -from utils.daolytics_uitls import ( - get_mongo_credentials, - get_neo4j_credentials, -) +from tc_core_analyzer_lib.utils.activity import DiscordActivity +from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials + +from .utils.analyzer_setup import launch_db_access +from .utils.remove_and_setup_guild import setup_db_guild class TestAssessEngagementReplies(TestCase): From cb74ee313df78ec676f5ce6b882d3fc4b40658c3 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 7 Mar 2024 07:50:08 +0330 Subject: [PATCH 16/48] feat: delete comments! --- .../analysis/compute_interaction_matrix_discord.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/discord_analyzer/analysis/compute_interaction_matrix_discord.py b/discord_analyzer/analysis/compute_interaction_matrix_discord.py index f3cee51..df9bff5 100644 --- a/discord_analyzer/analysis/compute_interaction_matrix_discord.py +++ b/discord_analyzer/analysis/compute_interaction_matrix_discord.py @@ -1,9 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# compute_interaction_matrix_discord.py -# -# Author Ene SS Rawa / Tjitse van der Molen import copy from typing import Any From e2566b74986aa0179eff334e170b3111936dacbe Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 7 Mar 2024 08:10:27 +0330 Subject: [PATCH 17/48] feat: update dependency version! --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a10cc7d..756f771 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,7 +22,7 @@ tc-messageBroker==1.4.0 sentry-sdk rq redis -tc-core-analyzer-lib==1.1.0 +tc-core-analyzer-lib==1.2.0 tc-neo4j-lib==1.0.0 pybars3 backoff==2.2.1 From 315497cef9e45e8214639d846a2aa31c4d339ece Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 12 Mar 2024 14:17:03 +0330 Subject: [PATCH 18/48] feat: update test case command! To raise error if some tests failing. --- docker-entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index c10c66b..46b7f9d 100644 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -python3 -m coverage run --omit=tests/* -m pytest tests +python3 -m coverage run --omit=tests/* -m pytest . && echo "Tests Passed" || exit 1 python3 -m coverage lcov -i -o coverage/lcov.info \ No newline at end of file From edde9c946a5d22530253ddd4b5dfcb780bf0a3fd Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 17 Apr 2024 17:07:43 +0330 Subject: [PATCH 19/48] update: dependency lib version! 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 756f771..069bc46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,7 +22,7 @@ tc-messageBroker==1.4.0 sentry-sdk rq redis -tc-core-analyzer-lib==1.2.0 +tc-core-analyzer-lib==1.3.0 tc-neo4j-lib==1.0.0 pybars3 backoff==2.2.1 From 0b5d07243fdfb6e26f9e3a9543efe4a184336c22 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Mon, 20 May 2024 11:42:35 +0330 Subject: [PATCH 20/48] fix: code cleaning + removing some unnecessary codes! --- analyzer_init.py | 7 +- .../DB_operations/mongo_neo4j_ops.py | 75 +++--- .../DB_operations/mongodb_interaction.py | 32 --- .../analyzer/analyzer_heatmaps.py | 12 - discord_analyzer/analyzer/utils/__init__.py | 0 .../analyzer_db_manager.py} | 2 +- discord_analyzer/analyzer/utils/guild.py | 58 +++++ discord_analyzer/rn_analyzer.py | 241 ++++++------------ discord_utils.py | 10 +- utils/get_guild_utils.py | 6 +- 10 files changed, 190 insertions(+), 253 deletions(-) create mode 100644 discord_analyzer/analyzer/utils/__init__.py rename discord_analyzer/analyzer/{base_analyzer.py => utils/analyzer_db_manager.py} (98%) create mode 100644 discord_analyzer/analyzer/utils/guild.py diff --git a/analyzer_init.py b/analyzer_init.py index a241acd..b93568d 100644 --- a/analyzer_init.py +++ b/analyzer_init.py @@ -13,8 +13,9 @@ class AnalyzerInit: initialize the analyzer with its configs """ - def __init__(self, community_id: str) -> None: - self.community_id = community_id + # TODO: update to platform_id as input + def __init__(self, guild_id: str) -> None: + self.guild_id = guild_id def get_analyzer(self) -> tuple[RnDaoAnalyzer, dict[str, Any]]: """ @@ -23,7 +24,7 @@ def get_analyzer(self) -> tuple[RnDaoAnalyzer, dict[str, Any]]: analyzer : RnDaoAnalyzer mongo_creds : dict[str, Any] """ - analyzer = RnDaoAnalyzer(self.community_id) + analyzer = RnDaoAnalyzer(self.guild_id) # credentials mongo_creds = get_mongo_credentials() diff --git a/discord_analyzer/DB_operations/mongo_neo4j_ops.py b/discord_analyzer/DB_operations/mongo_neo4j_ops.py index b259dc2..79da7cd 100644 --- a/discord_analyzer/DB_operations/mongo_neo4j_ops.py +++ b/discord_analyzer/DB_operations/mongo_neo4j_ops.py @@ -59,6 +59,7 @@ def set_mongo_db_ops( def store_analytics_data( self, analytics_data: dict, + guild_id: str, community_id: str, remove_memberactivities: bool = False, remove_heatmaps: bool = False, @@ -69,12 +70,13 @@ def store_analytics_data( Parameters: ------------- - analytics_data : dictionary - a nested dictinoary with keys as guildId - and values as heatmaps and memberactivities data - heatmaps is also a list of dictinoaries + analytics_data : dict + a nested dictinoary with keys as `heatmaps`, and `memberactivities` + values of the heatmaps is a list of dictinoaries and memberactivities is a tuple of memberactivities dictionary list - and memebractivities networkx object dictionary list + and memebractivities networkx object dictionary list + guild_id: str + what the data is related to community_id : str the community id to save the data for remove_memberactivities : bool @@ -88,39 +90,38 @@ def store_analytics_data( ---------- `None` """ - for guildId in analytics_data.keys(): - heatmaps_data = analytics_data[guildId]["heatmaps"] - (memberactivities_data, memberactivities_networkx_data) = analytics_data[ - guildId - ]["memberactivities"] - - if not self.testing: - # mongodb transactions - self.mongoOps._do_analytics_write_transaction( - guildId=guildId, - 
delete_heatmaps=remove_heatmaps, - delete_member_acitivities=remove_memberactivities, - acitivties_list=memberactivities_data, - heatmaps_list=heatmaps_data, - ) + heatmaps_data = analytics_data["heatmaps"] + (memberactivities_data, memberactivities_networkx_data) = analytics_data[ + "memberactivities" + ] + + if not self.testing: + # mongodb transactions + self.mongoOps._do_analytics_write_transaction( + guildId=guild_id, + delete_heatmaps=remove_heatmaps, + delete_member_acitivities=remove_memberactivities, + acitivties_list=memberactivities_data, + heatmaps_list=heatmaps_data, + ) - # neo4j transactions - if ( - memberactivities_networkx_data is not None - and memberactivities_networkx_data != [] - ): - queries_list = make_neo4j_networkx_query_dict( - networkx_graphs=memberactivities_networkx_data, - guildId=guildId, - community_id=community_id, - ) - self.run_operations_transaction( - guildId=guildId, - queries_list=queries_list, - remove_memberactivities=remove_memberactivities, - ) - else: - logging.warning("Testing mode enabled! Not saving any data") + # neo4j transactions + if ( + memberactivities_networkx_data is not None + and memberactivities_networkx_data != [] + ): + queries_list = make_neo4j_networkx_query_dict( + networkx_graphs=memberactivities_networkx_data, + guildId=guild_id, + community_id=community_id, + ) + self.run_operations_transaction( + guildId=guild_id, + queries_list=queries_list, + remove_memberactivities=remove_memberactivities, + ) + else: + logging.warning("Testing mode enabled! Not saving any data") def run_operations_transaction( self, guildId, queries_list, remove_memberactivities diff --git a/discord_analyzer/DB_operations/mongodb_interaction.py b/discord_analyzer/DB_operations/mongodb_interaction.py index 4783c2f..c288040 100644 --- a/discord_analyzer/DB_operations/mongodb_interaction.py +++ b/discord_analyzer/DB_operations/mongodb_interaction.py @@ -188,38 +188,6 @@ def _batch_insertion(self, collection, data, message, batch_size): logging.info(f"{message}: Batch {loop_idx + 1}/{batch_count}") collection.insert_many(data[batch_idx : batch_idx + batch_size]) - def check_heatmaps(self, guildId, selectedChannels, heatmap_model): - """ - check whether all the channels are in heatmaps or not - - Parameters: - ------------- - guildId : str - the guildId to remove its collection data - selectedChannels : list - list of `channelId`s - heatmap_model : HeatMapModel - the heatmaps model to access it - - Returns: - --------- - is_available : bool - is all the selectedChannels available in heatmap collection or not - """ - heatmap_c = heatmap_model(self.mongo_db_access.db_mongo_client[guildId]) - channels = heatmap_c.get_channels_disctinct() - - if channels is not None: - # check if all the selected channels are available in heatmaps - is_available = all(element in selectedChannels for element in channels) - else: - log_msg = "MongoDB heatmaps table check raised an exception," - log_msg += " the heatmaps analysis wouldn't be done!" 
- logging.info(log_msg) - is_available = True - - return is_available - def empty_collection(self, session, guildId, activity): """ empty a specified collection diff --git a/discord_analyzer/analyzer/analyzer_heatmaps.py b/discord_analyzer/analyzer/analyzer_heatmaps.py index 6f16645..76de1c8 100644 --- a/discord_analyzer/analyzer/analyzer_heatmaps.py +++ b/discord_analyzer/analyzer/analyzer_heatmaps.py @@ -2,7 +2,6 @@ from collections import Counter from datetime import datetime, timedelta -# from analyzer.analyzer.base_analyzer import Base_analyzer from discord_analyzer.analysis.activity_hourly import activity_hourly from discord_analyzer.analyzer.heatmaps_utils import ( get_bot_id, @@ -21,17 +20,6 @@ def __init__(self, DB_connections: MongoNeo4jDB, testing: bool) -> None: self.DB_connections = DB_connections self.testing = testing - def is_empty(self, guildId: str): - """ - check whether the heatmaps for the guild is empty or not - """ - client = self.DB_connections.mongoOps.mongo_db_access.db_mongo_client - - heatmap_c = HeatMapModel(client[guildId]) - document = heatmap_c.get_one() - - return document is None - def analysis_heatmap(self, guildId: str, from_start: bool = False): """ Based on the rawdata creates and stores the heatmap data diff --git a/discord_analyzer/analyzer/utils/__init__.py b/discord_analyzer/analyzer/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/discord_analyzer/analyzer/base_analyzer.py b/discord_analyzer/analyzer/utils/analyzer_db_manager.py similarity index 98% rename from discord_analyzer/analyzer/base_analyzer.py rename to discord_analyzer/analyzer/utils/analyzer_db_manager.py index 0f7323d..7f7ad22 100644 --- a/discord_analyzer/analyzer/base_analyzer.py +++ b/discord_analyzer/analyzer/utils/analyzer_db_manager.py @@ -3,7 +3,7 @@ from discord_analyzer.DB_operations.mongo_neo4j_ops import MongoNeo4jDB -class Base_analyzer: +class AnalyzerDBManager: def __init__(self): """ base class for the analyzer diff --git a/discord_analyzer/analyzer/utils/guild.py b/discord_analyzer/analyzer/utils/guild.py new file mode 100644 index 0000000..30e03c6 --- /dev/null +++ b/discord_analyzer/analyzer/utils/guild.py @@ -0,0 +1,58 @@ +from utils.get_mongo_client import MongoSingleton + + +class Guild: + # TODO: Update to `Platform` and add `platform_id` in future + def __init__(self, guild_id: str) -> None: + self.guild_id = guild_id + self.client = MongoSingleton.get_instance().get_client() + + def check_existance(self) -> bool: + """ + check for existance of a Guild + + Returns + ---------- + exists : bool + if the Guild exist or not + """ + platform = self.client["Core"]["platforms"].find_one( + {"metadata.id": self.guild_id}, + {"_id": 1}, + ) + exists: bool + if platform is None: + exists = False + else: + exists = True + + return exists + + def update_isin_progress(self): + """ + update isInProgress field of platforms collection + """ + self.client["Core"]["platforms"].update_one( + {"metadata.id": self.guild_id}, {"$set": {"metadata.isInProgress": False}} + ) + + def get_community_id(self) -> str: + """ + get the community id of a guild + + Returns + -------- + community_id : str + the community that the Guild is related to + """ + platform = self.client["Core"]["platforms"].find_one( + {"metadata.id": self.guild_id}, {"community": 1} + ) + if platform is None: + raise ValueError( + f"No platform is available for the given guild: {self.guild_id}" + ) + + community_id = str(platform["community"]) + + return community_id diff --git 
a/discord_analyzer/rn_analyzer.py b/discord_analyzer/rn_analyzer.py index bd34c14..2d6e0ed 100644 --- a/discord_analyzer/rn_analyzer.py +++ b/discord_analyzer/rn_analyzer.py @@ -1,28 +1,28 @@ -#!/usr/bin/env python3 import logging from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.analyzer_memberactivities import MemberActivities -from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics -from discord_analyzer.models.GuildsRnDaoModel import GuildsRnDaoModel -from discord_analyzer.models.HeatMapModel import HeatMapModel -from discord_analyzer.models.RawInfoModel import RawInfoModel +from discord_analyzer.analyzer.utils.guild import Guild -class RnDaoAnalyzer(Base_analyzer): +class RnDaoAnalyzer(AnalyzerDBManager): """ RnDaoAnalyzer class that handles database connection and data analysis """ - def __init__(self, community_id: str, testing=False): + def __init__(self, guild_id: str, testing=False): """ Class initiation function """ """ Testing, prevents from data upload""" self.testing = testing - self.community_id = community_id + self.guild_object = Guild(guild_id) + self.guild_id = guild_id + self.community_id = self.guild_object.get_community_id() + logging.basicConfig() logging.getLogger().setLevel(logging.INFO) @@ -30,111 +30,66 @@ def setup_neo4j_metrics(self) -> None: """ setup the neo4j analytics wrapper """ - self.neo4j_analytics = Neo4JAnalytics(neo4j_ops=self.DB_connections.neo4j_ops) - def run_once(self, guildId): + def run_once(self): """Run analysis once (Wrapper)""" + # check if the guild was available + # if not, will raise an error + self.check_guild() - guilds_c = GuildsRnDaoModel( - self.DB_connections.mongoOps.mongo_db_access.db_mongo_client["Core"] - ) - - guilds = guilds_c.get_connected_guilds(guildId) - - logging.info(f"Creating heatmaps for {guilds}") + logging.info(f"Creating heatmaps for guild: {self.guild_id}") # each guild data in a nested dictionary format guilds_data = {} - for guild in guilds: - logging.info(f"Doing Analytics for {guild}") - - heatmaps_analysis = Heatmaps(self.DB_connections, self.testing) - heatmaps_data = heatmaps_analysis.analysis_heatmap(guild) - - # storing heatmaps since memberactivities use them - analytics_data = {} - analytics_data[f"{guild}"] = { - "heatmaps": heatmaps_data, - "memberactivities": ( - None, - None, - ), - } - self.DB_connections.store_analytics_data( - analytics_data=analytics_data, - community_id=self.community_id, - remove_memberactivities=False, - remove_heatmaps=False, - ) - - memberactivities_analysis = MemberActivities(self.DB_connections) - ( - member_activities_data, - member_acitivities_networkx_data, - ) = memberactivities_analysis.analysis_member_activity( - guild, self.connection_str - ) - - # storing whole data into a dictinoary - guilds_data[f"{guild}"] = { - "heatmaps": None, - "memberactivities": ( - member_activities_data, - member_acitivities_networkx_data, - ), - } - - self.DB_connections.store_analytics_data( - analytics_data=guilds_data, - community_id=self.community_id, - remove_heatmaps=False, - remove_memberactivities=False, - ) - - self.neo4j_analytics.compute_metrics(guildId=guild, from_start=False) - - self._update_isin_progress(guildId=guild) - - def get_guilds(self): - """Returns the list of all guilds""" - client = self.DB_connections.mongoOps.mongo_db_access.db_mongo_client - rawinfo_c 
= RawInfoModel(client) - - logging.info(f"Listed guilds {rawinfo_c.database.list_collection_names()}") - - def recompute_analytics_on_guilds(self, guildId_list): - """ - recompute the analytics for the guilds available in Core table - if the guildId_list wasn't available in Core then don't recompute the analytics + logging.info(f"Doing Analytics for guild: {self.guild_id}") - Parameters: - -------------- - guildId_list : list of str - list of `guildId`s - Input can be `None` meaning recompute for all guilds + heatmaps_analysis = Heatmaps(self.DB_connections, self.testing) + heatmaps_data = heatmaps_analysis.analysis_heatmap(self.guild_id) - Returns: - --------- - `None` - """ - client = self.DB_connections.mongoOps.mongo_db_access.db_mongo_client + # storing heatmaps since memberactivities use them + analytics_data = {} + guilds_data["heatmaps"] = heatmaps_data + guilds_data["memberactivities"] = (None, None) + + self.DB_connections.store_analytics_data( + analytics_data=analytics_data, + guild_id=self.guild_id, + community_id=self.community_id, + remove_memberactivities=False, + remove_heatmaps=False, + ) - # check if the guild was available in Core table - guilds_c = GuildsRnDaoModel(client["Core"]) - guilds = guilds_c.get_connected_guilds(guildId_list) + memberactivities_analysis = MemberActivities(self.DB_connections) + ( + member_activities_data, + member_acitivities_networkx_data, + ) = memberactivities_analysis.analysis_member_activity( + self.guild_id, self.connection_str + ) - logging.info(f"Recomputing analytics for {guilds}") + analytics_data = {} + # storing whole data into a dictinoary + guilds_data["heatmaps"] = None + guilds_data["memberactivities"] = ( + member_activities_data, + member_acitivities_networkx_data, + ) - for guildId in guilds: - self.recompute_analytics(guildId) + self.DB_connections.store_analytics_data( + analytics_data=guilds_data, + guild_id=self.guild_id, + community_id=self.community_id, + remove_heatmaps=False, + remove_memberactivities=False, + ) - self._update_isin_progress(guildId=guildId) + self.neo4j_analytics.compute_metrics(guildId=self.guild_id, from_start=False) - return None + self.guild_object.update_isin_progress(guildId=self.guild_id) - def recompute_analytics(self, guildId): + def recompute_analytics(self): """ recompute the memberactivities (and heatmaps in case needed) for a new selection of channels @@ -149,103 +104,69 @@ def recompute_analytics(self, guildId): new channel selection (analytics would be inserted in memebractivities) - Parameters: - ------------- - guildId : str - the guildId to remove its collection data - Returns: --------- `None` """ - client = self.DB_connections.mongoOps.mongo_db_access.db_mongo_client + # check if the guild was available + # if not, will raise an error + self.check_guild() - guild_c = GuildsRnDaoModel(client["Core"]) - selectedChannels = guild_c.get_guild_channels(guildId=guildId) + heatmaps_analysis = Heatmaps(self.DB_connections, self.testing) - # check if all the channels were available in heatmaps - is_available = self.DB_connections.mongoOps.check_heatmaps( - guildId=guildId, - selectedChannels=selectedChannels, - heatmap_model=HeatMapModel, + logging.info(f"Analyzing the Heatmaps data for guild: {self.guild_id}!") + heatmaps_data = heatmaps_analysis.analysis_heatmap( + guildId=self.guild_id, from_start=True ) - # initialize variable - heatmaps_data = None - heatmaps_analysis = Heatmaps(self.DB_connections, self.testing) - heatmap_isempty = heatmaps_analysis.is_empty(guildId) - - # if not 
available we should remove heatmaps data - # and run the analytics for heatmaps too - # TODO: condition update - is_available = False - if not is_available or heatmap_isempty: - logging.info(f"Analyzing the Heatmaps data for guild: {guildId}!") - heatmaps_data = heatmaps_analysis.analysis_heatmap( - guildId=guildId, from_start=True - ) - # storing heatmaps since memberactivities use them analytics_data = {} - analytics_data[f"{guildId}"] = { - "heatmaps": heatmaps_data, - "memberactivities": ( - None, - None, - ), - } + analytics_data["heatmaps"] = heatmaps_data + analytics_data["memberactivities"] = (None, None) + self.DB_connections.store_analytics_data( analytics_data=analytics_data, + guild_id=self.guild_id, community_id=self.community_id, remove_memberactivities=False, - remove_heatmaps=not is_available, + remove_heatmaps=True, ) # run the member_activity analyze - logging.info(f"Analyzing the MemberActivities data for guild: {guildId}!") + logging.info(f"Analyzing the MemberActivities data for guild: {self.guild_id}!") memberactivity_analysis = MemberActivities(self.DB_connections) ( member_activities_data, member_acitivities_networkx_data, ) = memberactivity_analysis.analysis_member_activity( - guildId, self.connection_str, from_start=True + self.guild_id, self.connection_str, from_start=True ) # storing whole data into a dictinoary analytics_data = {} - analytics_data[f"{guildId}"] = { - "heatmaps": None, - "memberactivities": ( - member_activities_data, - member_acitivities_networkx_data, - ), - } + # storing whole data into a dictinoary + analytics_data["heatmaps"] = None + analytics_data["memberactivities"] = ( + member_activities_data, + member_acitivities_networkx_data, + ) + logging.info(f"Storing analytics data for guild: {self.guild_id}!") self.DB_connections.store_analytics_data( analytics_data=analytics_data, + guild_id=self.guild_id, community_id=self.community_id, remove_memberactivities=True, remove_heatmaps=False, ) - self.neo4j_analytics.compute_metrics(guildId=guildId, from_start=True) - - self._update_isin_progress(guildId=guildId) - - # returning a value when the jobs finished - return True + self.neo4j_analytics.compute_metrics(guildId=self.guild_id, from_start=True) + self.guild_object.update_isin_progress(guildId=self.guild_id) - def _update_isin_progress(self, guildId): + def check_guild(self): """ - update isInProgress field of platforms collection - - Parameters: - ------------ - guildId : str - the guildId to update its document + check if the guild is available """ - client = self.DB_connections.mongoOps.mongo_db_access.db_mongo_client - - client["Core"]["platforms"].update_one( - {"metadata.id": guildId}, {"$set": {"metadata.isInProgress": False}} - ) + exist = self.guild_object.check_existance() + if exist is False: + raise ValueError(f"Guild with guildId: {self.guild_id} doesn't exist!") diff --git a/discord_utils.py b/discord_utils.py index 5d7af82..91ef330 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -26,10 +26,10 @@ def analyzer_recompute(sagaId: str, rabbit_creds: dict[str, Any]): ) else: platform_id = saga.data["platformId"] - guildId, commnity_id = get_guild_community_ids(platform_id) + guildId = get_guild_community_ids(platform_id) logging.info("Initializing the analyzer") - analyzer_init = AnalyzerInit(commnity_id) + analyzer_init = AnalyzerInit(guildId) analyzer, mongo_creds = analyzer_init.get_analyzer() logging.info("Analyzer initialized") @@ -64,9 +64,9 @@ def analyzer_run_once(sagaId: str, rabbit_creds: dict[str, Any]): 
logging.warn(f"Saga not found!, stopping the run_once for sagaId: {sagaId}") else: platform_id = saga.data["platformId"] - guildId, commnity_id = get_guild_community_ids(platform_id) + guildId = get_guild_community_ids(platform_id) - analyzer_init = AnalyzerInit(commnity_id) + analyzer_init = AnalyzerInit(guildId) analyzer, mongo_creds = analyzer_init.get_analyzer() def run_once_wrapper(**kwargs): @@ -116,7 +116,7 @@ def publish_on_success(connection, result, *args, **kwargs): (transactions_ordered, tx_not_started_count) = sort_transactions(transactions) platform_id = saga.data["platformId"] - guildId, _ = get_guild_community_ids(platform_id) + guildId = get_guild_community_ids(platform_id) msg = f"GUILDID: {guildId}: " if tx_not_started_count != 0: diff --git a/utils/get_guild_utils.py b/utils/get_guild_utils.py index 90e8817..728a7ce 100644 --- a/utils/get_guild_utils.py +++ b/utils/get_guild_utils.py @@ -22,14 +22,14 @@ def get_guild_community_ids(platform_id: str) -> tuple[str, str]: obj_platform_id = ObjectId(platform_id) platform = mongo_client["Core"]["platforms"].find_one( - {"name": "discord", "_id": obj_platform_id} + {"name": "discord", "_id": obj_platform_id}, + {"metadata.id": 1}, ) if platform is None: raise AttributeError(f"PLATFORM_ID: {platform_id}, No guild found!") guild_id = platform["metadata"]["id"] - community_id = str(platform["community"]) - return guild_id, community_id + return guild_id def get_guild_platform_id(guild_id: str) -> str: From c815dd4ff6360474d571de84be0d4cbff9890063 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Mon, 20 May 2024 12:04:57 +0330 Subject: [PATCH 21/48] fix: update import names based on changes! --- tests/integration/test_assess_engagement_mention.py | 4 ++-- tests/integration/test_assess_engagement_reactions.py | 4 ++-- tests/integration/test_assess_engagement_replies.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py index f6dfb4d..94e1154 100644 --- a/tests/integration/test_assess_engagement_mention.py +++ b/tests/integration/test_assess_engagement_mention.py @@ -3,7 +3,7 @@ from discord_analyzer.analysis.utils.member_activity_utils import assess_engagement from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps -from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials @@ -18,7 +18,7 @@ def setUp(self) -> None: self.create_db_connections() def create_db_connections(self): - base_analyzer = Base_analyzer() + base_analyzer = AnalyzerDBManager() mongo_creds = get_mongo_credentials() base_analyzer.set_mongo_database_info( mongo_db_user=mongo_creds["user"], diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py index 569cf48..4c7f63e 100644 --- a/tests/integration/test_assess_engagement_reactions.py +++ b/tests/integration/test_assess_engagement_reactions.py @@ -3,7 +3,7 @@ from discord_analyzer.analysis.utils.member_activity_utils import assess_engagement from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps -from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from 
tc_core_analyzer_lib.utils.activity import DiscordActivity from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials @@ -18,7 +18,7 @@ def setUp(self) -> None: self.create_db_connections() def create_db_connections(self): - base_analyzer = Base_analyzer() + base_analyzer = AnalyzerDBManager() mongo_creds = get_mongo_credentials() base_analyzer.set_mongo_database_info( mongo_db_user=mongo_creds["user"], diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py index 5ae7261..b533b8b 100644 --- a/tests/integration/test_assess_engagement_replies.py +++ b/tests/integration/test_assess_engagement_replies.py @@ -3,7 +3,7 @@ from discord_analyzer.analysis.utils.member_activity_utils import assess_engagement from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps -from discord_analyzer.analyzer.base_analyzer import Base_analyzer +from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials @@ -18,7 +18,7 @@ def setUp(self) -> None: self.create_db_connections() def create_db_connections(self): - base_analyzer = Base_analyzer() + base_analyzer = AnalyzerDBManager() mongo_creds = get_mongo_credentials() base_analyzer.set_mongo_database_info( mongo_db_user=mongo_creds["user"], From 2ad361adb3456e754c95f27200b58c0f051a6480 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Mon, 20 May 2024 16:11:29 +0330 Subject: [PATCH 22/48] feat: updating workflows based on new changes! Now the analyzer will be always for one guild now and cannot be instantiated for multiple guilds. This is for making its structure better. --- discord_analyzer/rn_analyzer.py | 19 ++---- discord_utils.py | 4 +- tests/integration/test_all_active_period.py | 11 +++- .../test_all_joined_day_members.py | 6 +- tests/integration/test_analyzer_init.py | 2 +- ...iod_1year_recompute_available_analytics.py | 9 ++- ..._period_1year_recompute_empty_analytics.py | 9 ++- ...riod_1year_run_once_available_analytics.py | 9 ++- ...r_period_1year_run_once_empty_analytics.py | 9 ++- .../test_analyzer_period_35days_run_once.py | 9 ++- ...od_3month_recompute_available_analytics.py | 9 ++- ...period_3month_recompute_empty_analytics.py | 9 ++- ...iod_3month_run_once_available_analytics.py | 9 ++- ..._period_3month_run_once_empty_analytics.py | 9 ++- ...od_6month_recompute_available_analytics.py | 9 ++- ...period_6month_recompute_empty_analytics.py | 9 ++- ...iod_6month_run_once_available_analytics.py | 9 ++- ..._period_6month_run_once_empty_analytics.py | 9 ++- ...iod_month_recompute_available_analytics.py | 9 ++- ..._period_month_recompute_empty_analytics.py | 9 ++- ...riod_month_run_once_available_analytics.py | 9 ++- ...r_period_month_run_once_empty_analytics.py | 9 ++- ...riod_week_recompute_available_analytics.py | 9 ++- ...r_period_week_recompute_empty_analytics.py | 9 ++- ...eriod_week_run_once_available_analytics.py | 9 ++- ...er_period_week_run_once_empty_analytics.py | 9 ++- .../test_assess_engagement_mention.py | 3 + .../test_assess_engagement_reactions.py | 3 + .../test_assess_engagement_replies.py | 2 + tests/integration/test_exclude_bots.py | 6 +- ..._generated_graph_period_1_year_run_once.py | 9 ++- .../test_generated_graph_period_1year.py | 9 ++- .../test_generated_graph_period_35_days.py | 9 ++- ...generated_graph_period_35_days_run_once.py | 9 ++- 
.../test_generated_graph_period_3_months.py | 9 ++- ...enerated_graph_period_3_months_run_once.py | 9 ++- .../test_generated_graph_period_6_months.py | 9 ++- ...enerated_graph_period_6_months_run_once.py | 10 ++- ...est_member_activities_action_all_active.py | 13 +++- ...member_activities_action_all_activities.py | 5 +- ...member_activity_from_start_no_past_data.py | 4 +- ...rom_start_with_guild_heatmaps_available.py | 9 ++- ...t_with_guild_memberactivities_available.py | 9 ++- ...r_activity_from_start_with_one_interval.py | 9 ++- .../integration/test_member_activity_utils.py | 5 +- .../test_memberactivities_mentions.py | 5 +- .../test_memberactivities_reaction.py | 5 +- .../test_memberactivities_reply.py | 5 +- ...t_mentioned_active_members_from_message.py | 9 ++- tests/integration/test_publish_on_success.py | 24 ++++++- tests/integration/utils/analyzer_setup.py | 66 ++++++++++++++++++- tests/integration/utils/mock_graph.py | 15 ++--- .../utils/remove_and_setup_guild.py | 3 +- 53 files changed, 358 insertions(+), 146 deletions(-) diff --git a/discord_analyzer/rn_analyzer.py b/discord_analyzer/rn_analyzer.py index 2d6e0ed..8a503cb 100644 --- a/discord_analyzer/rn_analyzer.py +++ b/discord_analyzer/rn_analyzer.py @@ -40,18 +40,13 @@ def run_once(self): logging.info(f"Creating heatmaps for guild: {self.guild_id}") - # each guild data in a nested dictionary format - guilds_data = {} - - logging.info(f"Doing Analytics for guild: {self.guild_id}") - heatmaps_analysis = Heatmaps(self.DB_connections, self.testing) heatmaps_data = heatmaps_analysis.analysis_heatmap(self.guild_id) # storing heatmaps since memberactivities use them analytics_data = {} - guilds_data["heatmaps"] = heatmaps_data - guilds_data["memberactivities"] = (None, None) + analytics_data["heatmaps"] = heatmaps_data + analytics_data["memberactivities"] = (None, None) self.DB_connections.store_analytics_data( analytics_data=analytics_data, @@ -71,14 +66,14 @@ def run_once(self): analytics_data = {} # storing whole data into a dictinoary - guilds_data["heatmaps"] = None - guilds_data["memberactivities"] = ( + analytics_data["heatmaps"] = None + analytics_data["memberactivities"] = ( member_activities_data, member_acitivities_networkx_data, ) self.DB_connections.store_analytics_data( - analytics_data=guilds_data, + analytics_data=analytics_data, guild_id=self.guild_id, community_id=self.community_id, remove_heatmaps=False, @@ -87,7 +82,7 @@ def run_once(self): self.neo4j_analytics.compute_metrics(guildId=self.guild_id, from_start=False) - self.guild_object.update_isin_progress(guildId=self.guild_id) + self.guild_object.update_isin_progress() def recompute_analytics(self): """ @@ -161,7 +156,7 @@ def recompute_analytics(self): ) self.neo4j_analytics.compute_metrics(guildId=self.guild_id, from_start=True) - self.guild_object.update_isin_progress(guildId=self.guild_id) + self.guild_object.update_isin_progress() def check_guild(self): """ diff --git a/discord_utils.py b/discord_utils.py index 91ef330..6b2d357 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -35,7 +35,7 @@ def analyzer_recompute(sagaId: str, rabbit_creds: dict[str, Any]): def recompute_wrapper(**kwargs): logging.info("recompute wrapper") - analyzer.recompute_analytics(guildId=guildId) + analyzer.recompute_analytics() def publish_wrapper(**kwargs): pass @@ -70,7 +70,7 @@ def analyzer_run_once(sagaId: str, rabbit_creds: dict[str, Any]): analyzer, mongo_creds = analyzer_init.get_analyzer() def run_once_wrapper(**kwargs): - analyzer.run_once(guildId=guildId) + 
analyzer.run_once() def publish_wrapper(**kwargs): pass diff --git a/tests/integration/test_all_active_period.py b/tests/integration/test_all_active_period.py index 3dbb302..32dc540 100644 --- a/tests/integration/test_all_active_period.py +++ b/tests/integration/test_all_active_period.py @@ -9,6 +9,7 @@ def test_two_weeks_period_active_members(): test all_active members for the two weeks period in the new schema """ guildId = "1234567" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,11 @@ def test_two_weeks_period_active_members(): # A guild connected at 35 days ago connected_days_before = 35 setup_db_guild( - db_access, guildId, discordId_list=acc_id, days_ago_period=connected_days_before + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=connected_days_before, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -112,8 +117,8 @@ def test_two_weeks_period_active_members(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", diff --git a/tests/integration/test_all_joined_day_members.py b/tests/integration/test_all_joined_day_members.py index 78728a5..45dae32 100644 --- a/tests/integration/test_all_joined_day_members.py +++ b/tests/integration/test_all_joined_day_members.py @@ -12,6 +12,7 @@ def test_all_joined_day_members(): testing the all_joined_day """ guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) today = datetime.now() @@ -28,6 +29,7 @@ def test_all_joined_day_members(): setup_db_guild( db_access, + platform_id, guildId, discordId_list=acc_id, dates=acc_join_dates, @@ -61,8 +63,8 @@ def test_all_joined_day_members(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() cursor = db_access.db_mongo_client[guildId]["memberactivities"].find([]) diff --git a/tests/integration/test_analyzer_init.py b/tests/integration/test_analyzer_init.py index 37b9b18..5c9ee06 100644 --- a/tests/integration/test_analyzer_init.py +++ b/tests/integration/test_analyzer_init.py @@ -8,9 +8,9 @@ def test_analyzer_init(): community_id = "aabbccddeeff001122334455" - analyzer = AnalyzerInit(community_id) guildId = "1234" + analyzer = AnalyzerInit(guildId) platform_id = "515151515151515151515151" days_ago_period = 30 mongo_creds = get_mongo_credentials() diff --git a/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py b/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py index 2d9a6d4..3b26751 100644 --- a/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_one_year_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_one_year_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=360) + setup_db_guild( + db_access, platform_id, 
guildId, discordId_list=acc_id, days_ago_period=360 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -73,8 +76,8 @@ def test_analyzer_one_year_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py index 0842d51..fff0adc 100644 --- a/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_one_year_period_recompute_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_one_year_period_recompute_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=360) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -53,8 +56,8 @@ def test_analyzer_one_year_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py b/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py index 949fdf7..efeb209 100644 --- a/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_one_year_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_one_year_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=360) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -73,8 +76,8 @@ def test_analyzer_one_year_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py 
b/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py index 1a4ce15..3dadfa3 100644 --- a/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_one_year_period_run_once_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_one_year_period_run_once_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=360) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -53,8 +56,8 @@ def test_analyzer_one_year_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_35days_run_once.py b/tests/integration/test_analyzer_period_35days_run_once.py index 1e39e54..72c455d 100644 --- a/tests/integration/test_analyzer_period_35days_run_once.py +++ b/tests/integration/test_analyzer_period_35days_run_once.py @@ -17,6 +17,7 @@ def test_analyzer_40days_period_run_once_available_analytics_overlapping_period( """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -24,7 +25,9 @@ def test_analyzer_40days_period_run_once_available_analytics_overlapping_period( "user2", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=40) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=40 + ) db_access.db_mongo_client[guildId].drop_collection("heatmaps") db_access.db_mongo_client[guildId].drop_collection("memberactivities") @@ -76,8 +79,8 @@ def test_analyzer_40days_period_run_once_available_analytics_overlapping_period( db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py index 35dfba6..6ed037e 100644 --- a/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_three_month_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_three_month_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=90) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, 
days_ago_period=90 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -73,8 +76,8 @@ def test_analyzer_three_month_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py index dd75b2d..3ea7903 100644 --- a/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_three_month_period_recompute_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_three_month_period_recompute_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=90) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -53,8 +56,8 @@ def test_analyzer_three_month_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py index 9504978..46c43c5 100644 --- a/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_three_month_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_three_month_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=90) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -73,8 +76,8 @@ def test_analyzer_three_month_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py 
b/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py index 93d6ae2..e5f8fd6 100644 --- a/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_three_month_period_run_once_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_three_month_period_run_once_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=90) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -53,8 +56,8 @@ def test_analyzer_three_month_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py index 9d3a531..623f210 100644 --- a/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_six_month_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_six_month_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=180) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -73,8 +76,8 @@ def test_analyzer_six_month_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py index 3bc0ebf..16415e7 100644 --- a/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_six_month_period_recompute_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_six_month_period_recompute_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=180) + setup_db_guild( + 
db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -53,8 +56,8 @@ def test_analyzer_six_month_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py index 4174abf..d8703d4 100644 --- a/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_six_month_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_six_month_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=180) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -73,8 +76,8 @@ def test_analyzer_six_month_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py index df451a0..a2aa516 100644 --- a/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_six_month_period_run_once_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_six_month_period_run_once_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=180) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -53,8 +56,8 @@ def test_analyzer_six_month_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_month_recompute_available_analytics.py 
b/tests/integration/test_analyzer_period_month_recompute_available_analytics.py index b44039d..03a10da 100644 --- a/tests/integration/test_analyzer_period_month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_month_recompute_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_month_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_month_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=30) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=30 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -72,8 +75,8 @@ def test_analyzer_month_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py index 8fee78c..dd41400 100644 --- a/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_month_period_recompute_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_month_period_recompute_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=30) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=30 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -52,8 +55,8 @@ def test_analyzer_month_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_month_run_once_available_analytics.py index 36f07fe..2f71a80 100644 --- a/tests/integration/test_analyzer_period_month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_month_run_once_available_analytics.py @@ -16,6 +16,7 @@ def test_analyzer_month_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -23,7 +24,9 @@ def test_analyzer_month_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=30) + setup_db_guild( + db_access, 
platform_id, guildId, discordId_list=acc_id, days_ago_period=30 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -72,8 +75,8 @@ def test_analyzer_month_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py index 1510889..d753c88 100644 --- a/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_month_period_run_once_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_month_period_run_once_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=30) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=30 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -52,8 +55,8 @@ def test_analyzer_month_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_week_recompute_available_analytics.py b/tests/integration/test_analyzer_period_week_recompute_available_analytics.py index ad4a412..7c2b723 100644 --- a/tests/integration/test_analyzer_period_week_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_week_recompute_available_analytics.py @@ -17,13 +17,16 @@ def test_analyzer_week_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ "973993299281076285", "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=8) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=8 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -68,8 +71,8 @@ def test_analyzer_week_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find("memberactivities", {}) memberactivities_data = list(memberactivities_cursor) diff --git a/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py index 
b1b55ab..1bc3691 100644 --- a/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_week_period_recompute_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_week_period_recompute_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=7) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=7 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -52,8 +55,8 @@ def test_analyzer_week_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_cursor = db_access.db_mongo_client[guildId][ "memberactivities" diff --git a/tests/integration/test_analyzer_period_week_run_once_available_analytics.py b/tests/integration/test_analyzer_period_week_run_once_available_analytics.py index c6814ed..bc5b726 100644 --- a/tests/integration/test_analyzer_period_week_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_week_run_once_available_analytics.py @@ -17,13 +17,16 @@ def test_analyzer_week_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ "973993299281076285", "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=8) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=8 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -67,8 +70,8 @@ def test_analyzer_week_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py index 81f791f..178a4e8 100644 --- a/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py @@ -14,6 +14,7 @@ def test_analyzer_week_period_run_once_empty_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ @@ -21,7 +22,9 @@ def test_analyzer_week_period_run_once_empty_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=7) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=7 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -51,8 +54,8 @@ def 
test_analyzer_week_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py index 94e1154..24a16f4 100644 --- a/tests/integration/test_assess_engagement_mention.py +++ b/tests/integration/test_assess_engagement_mention.py @@ -72,8 +72,11 @@ def test_single_user_mention(self): "DROP_H_THR": 2, "DROP_I_THR": 1, } + platform_id = "515151515151515151515151" + setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py index 4c7f63e..2ca9a9a 100644 --- a/tests/integration/test_assess_engagement_reactions.py +++ b/tests/integration/test_assess_engagement_reactions.py @@ -72,8 +72,11 @@ def test_single_user_reaction(self): "DROP_H_THR": 2, "DROP_I_THR": 1, } + platform_id = "515151515151515151515151" + setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py index b533b8b..f59ecf3 100644 --- a/tests/integration/test_assess_engagement_replies.py +++ b/tests/integration/test_assess_engagement_replies.py @@ -72,8 +72,10 @@ def test_single_user_reply(self): "DROP_H_THR": 2, "DROP_I_THR": 1, } + platform_id = "515151515151515151515151" setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, diff --git a/tests/integration/test_exclude_bots.py b/tests/integration/test_exclude_bots.py index 973e23d..8ed858a 100644 --- a/tests/integration/test_exclude_bots.py +++ b/tests/integration/test_exclude_bots.py @@ -10,6 +10,7 @@ def test_excluding_bots_heatmaps(): """ test if we're excluding bots from analyzer pipeline """ + platform_id = "515151515151515151515151" guildId = "1234567" db_access = launch_db_access(guildId) @@ -28,6 +29,7 @@ def test_excluding_bots_heatmaps(): connected_days_before = 35 setup_db_guild( db_access, + platform_id, guildId, discordId_list=acc_id, discordId_isbot=acc_isbots, @@ -65,8 +67,8 @@ def test_excluding_bots_heatmaps(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() db_access.db_mongo_client[guildId] diff --git a/tests/integration/test_generated_graph_period_1_year_run_once.py b/tests/integration/test_generated_graph_period_1_year_run_once.py index c9ec541..4586712 100644 --- a/tests/integration/test_generated_graph_period_1_year_run_once.py +++ b/tests/integration/test_generated_graph_period_1_year_run_once.py @@ -17,6 +17,7 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, 
discordId_list=acc_id, days_ago_period=360) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_1year.py b/tests/integration/test_generated_graph_period_1year.py index b332395..330b326 100644 --- a/tests/integration/test_generated_graph_period_1year.py +++ b/tests/integration/test_generated_graph_period_1year.py @@ -17,6 +17,7 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=360) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_35_days.py b/tests/integration/test_generated_graph_period_35_days.py index 9ee6c65..94f16c7 100644 --- a/tests/integration/test_generated_graph_period_35_days.py +++ b/tests/integration/test_generated_graph_period_35_days.py @@ -17,6 +17,7 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=35) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=35 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_35_days_run_once.py b/tests/integration/test_generated_graph_period_35_days_run_once.py index 8404904..b775551 100644 --- a/tests/integration/test_generated_graph_period_35_days_run_once.py +++ 
b/tests/integration/test_generated_graph_period_35_days_run_once.py @@ -17,6 +17,7 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=35) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=35 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_3_months.py b/tests/integration/test_generated_graph_period_3_months.py index a92d2dd..2642116 100644 --- a/tests/integration/test_generated_graph_period_3_months.py +++ b/tests/integration/test_generated_graph_period_3_months.py @@ -17,6 +17,7 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=90) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_3_months_run_once.py b/tests/integration/test_generated_graph_period_3_months_run_once.py index ccb3c13..5c242a6 100644 --- a/tests/integration/test_generated_graph_period_3_months_run_once.py +++ b/tests/integration/test_generated_graph_period_3_months_run_once.py @@ -17,6 +17,7 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=90) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): 
db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_6_months.py b/tests/integration/test_generated_graph_period_6_months.py index a5beb77..8736514 100644 --- a/tests/integration/test_generated_graph_period_6_months.py +++ b/tests/integration/test_generated_graph_period_6_months.py @@ -17,6 +17,7 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +32,9 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=180) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +84,8 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_generated_graph_period_6_months_run_once.py b/tests/integration/test_generated_graph_period_6_months_run_once.py index 870c8c7..f0e1daf 100644 --- a/tests/integration/test_generated_graph_period_6_months_run_once.py +++ b/tests/integration/test_generated_graph_period_6_months_run_once.py @@ -17,6 +17,8 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" + db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,7 +33,9 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): "973993299281076286", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=180) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -81,8 +85,8 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() results = neo4j_ops.gds.run_cypher( f""" diff --git a/tests/integration/test_member_activities_action_all_active.py b/tests/integration/test_member_activities_action_all_active.py index 55df84b..e63031f 100644 --- a/tests/integration/test_member_activities_action_all_active.py +++ b/tests/integration/test_member_activities_action_all_active.py @@ -11,9 +11,11 @@ def setUp(self) -> None: self.db_access = launch_db_access(self.guildId) def test_single_user_action(self): + platform_id = "515151515151515151515151" users_id_list = ["user1"] setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, 
days_ago_period=35, @@ -44,7 +46,7 @@ def test_single_user_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, {"_id": 0, "all_active": 1} @@ -58,8 +60,11 @@ def test_single_user_action(self): def test_lone_msg_action(self): users_id_list = ["user1", "user2", "user3"] + platform_id = "515151515151515151515151" + setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, @@ -91,7 +96,7 @@ def test_lone_msg_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, {"_id": 0, "all_active": 1} @@ -104,9 +109,11 @@ def test_lone_msg_action(self): self.assertEqual(set(document["all_active"]), set(["user1", "user2"])) def test_thr_message_action(self): + platform_id = "515151515151515151515151" users_id_list = ["user1", "user2", "user3", "user4"] setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, @@ -138,7 +145,7 @@ def test_thr_message_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, {"_id": 0, "all_active": 1, "date": 1} diff --git a/tests/integration/test_member_activities_action_all_activities.py b/tests/integration/test_member_activities_action_all_activities.py index f19df8a..6ad3c0f 100644 --- a/tests/integration/test_member_activities_action_all_activities.py +++ b/tests/integration/test_member_activities_action_all_activities.py @@ -30,8 +30,11 @@ def test_single_user_action(self): "DROP_H_THR": 2, "DROP_I_THR": 1, } + platform_id = "515151515151515151515151" + setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, @@ -63,7 +66,7 @@ def test_single_user_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, diff --git a/tests/integration/test_member_activity_from_start_no_past_data.py b/tests/integration/test_member_activity_from_start_no_past_data.py index 4943f0c..8c46931 100644 --- a/tests/integration/test_member_activity_from_start_no_past_data.py +++ b/tests/integration/test_member_activity_from_start_no_past_data.py @@ -94,8 +94,8 @@ def test_analyzer_member_activities_from_start_empty_memberactivities(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ "memberactivities" diff --git 
a/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py b/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py index 06a9a0f..22e336e 100644 --- a/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py +++ b/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py @@ -14,9 +14,12 @@ def test_analyzer_member_activities_from_start_available_heatmaps(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) - setup_db_guild(db_access, guildId, discordId_list=["973993299281076285"]) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=["973993299281076285"] + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -56,8 +59,8 @@ def test_analyzer_member_activities_from_start_available_heatmaps(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ "memberactivities" diff --git a/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py b/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py index bd160ac..6cfac87 100644 --- a/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py +++ b/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py @@ -13,9 +13,12 @@ def test_analyzer_member_activities_from_start_available_member_activity(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) - setup_db_guild(db_access, guildId, discordId_list=["973993299281076285"]) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=["973993299281076285"] + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -50,8 +53,8 @@ def test_analyzer_member_activities_from_start_available_member_activity(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ "memberactivities" diff --git a/tests/integration/test_member_activity_from_start_with_one_interval.py b/tests/integration/test_member_activity_from_start_with_one_interval.py index 3e42915..c066701 100644 --- a/tests/integration/test_member_activity_from_start_with_one_interval.py +++ b/tests/integration/test_member_activity_from_start_with_one_interval.py @@ -10,10 +10,13 @@ def test_analyzer_from_start_one_interval(): run the analyzer from start and just for one interval """ # first create the collections + platform_id = "515151515151515151515151" guildId = "1234" db_access = launch_db_access(guildId) - setup_db_guild(db_access, guildId, discordId_list=["973993299281076285"]) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=["973993299281076285"] + ) rawinfo_samples = [] @@ -41,8 +44,8 @@ def test_analyzer_from_start_one_interval(): 
db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") - analyzer = setup_analyzer() - analyzer.recompute_analytics(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ "memberactivities" diff --git a/tests/integration/test_member_activity_utils.py b/tests/integration/test_member_activity_utils.py index 776261d..f6b8822 100644 --- a/tests/integration/test_member_activity_utils.py +++ b/tests/integration/test_member_activity_utils.py @@ -7,12 +7,13 @@ def test_utils_get_members(): - analyzer = setup_analyzer() + analyzer = setup_analyzer(guildId, platform_id) + platform_id = "515151515151515151515151" guildId = "1012430565959553145" db_access = launch_db_access(guildId) users = ["973993299281076285"] - setup_db_guild(db_access, guildId, discordId_list=users) + setup_db_guild(db_access, platform_id, guildId, discordId_list=users) rawinfo_samples = [] for i in range(150): diff --git a/tests/integration/test_memberactivities_mentions.py b/tests/integration/test_memberactivities_mentions.py index 1b3cb1c..c30efed 100644 --- a/tests/integration/test_memberactivities_mentions.py +++ b/tests/integration/test_memberactivities_mentions.py @@ -27,8 +27,11 @@ def test_single_user_interaction(self): "DROP_H_THR": 2, "DROP_I_THR": 1, } + platform_id = "515151515151515151515151" + setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, @@ -60,7 +63,7 @@ def test_single_user_interaction(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, diff --git a/tests/integration/test_memberactivities_reaction.py b/tests/integration/test_memberactivities_reaction.py index c57264d..ff94b19 100644 --- a/tests/integration/test_memberactivities_reaction.py +++ b/tests/integration/test_memberactivities_reaction.py @@ -30,8 +30,11 @@ def test_single_user_action(self): "DROP_H_THR": 2, "DROP_I_THR": 1, } + platform_id = "515151515151515151515151" + setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, @@ -63,7 +66,7 @@ def test_single_user_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, diff --git a/tests/integration/test_memberactivities_reply.py b/tests/integration/test_memberactivities_reply.py index 9b5a0e6..62d0b0a 100644 --- a/tests/integration/test_memberactivities_reply.py +++ b/tests/integration/test_memberactivities_reply.py @@ -12,6 +12,8 @@ def setUp(self) -> None: def test_single_user_interaction(self): users_id_list = ["user1", "user2"] + platform_id = "515151515151515151515151" + action = { "INT_THR": 1, "UW_DEG_THR": 1, @@ -29,6 +31,7 @@ def test_single_user_interaction(self): } setup_db_guild( self.db_access, + platform_id, self.guildId, discordId_list=users_id_list, days_ago_period=35, @@ -60,7 +63,7 @@ def test_single_user_interaction(self): 
self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer() + analyzer = setup_analyzer(self.guildId, platform_id) analyzer.recompute_analytics(self.guildId) cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, diff --git a/tests/integration/test_mentioned_active_members_from_message.py b/tests/integration/test_mentioned_active_members_from_message.py index d0b94f3..6a6543a 100644 --- a/tests/integration/test_mentioned_active_members_from_message.py +++ b/tests/integration/test_mentioned_active_members_from_message.py @@ -12,13 +12,16 @@ def test_mention_active_members_from_rawinfo(): """ # first create the collections guildId = "1234" + platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) acc_id = [ "user1", "user2", ] - setup_db_guild(db_access, guildId, discordId_list=acc_id, days_ago_period=7) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=7 + ) db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") @@ -49,8 +52,8 @@ def test_mention_active_members_from_rawinfo(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer() - analyzer.run_once(guildId=guildId) + analyzer = setup_analyzer(guildId, platform_id) + analyzer.run_once() memberactivities_cursor = db_access.query_db_find( "memberactivities", {}, sorting=("date", -1) diff --git a/tests/integration/test_publish_on_success.py b/tests/integration/test_publish_on_success.py index 3afc4cb..de647e9 100644 --- a/tests/integration/test_publish_on_success.py +++ b/tests/integration/test_publish_on_success.py @@ -40,6 +40,26 @@ def test_publish_on_success_check_notification_choreographies(): db_access.db_mongo_client[guild_id].drop_collection("guildmembers") db_access.db_mongo_client[at_db].drop_collection(at_collection) + act_param = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + window = { + "period_size": 7, + "step_size": 1, + } + db_access.db_mongo_client["Core"]["platforms"].insert_one( { "_id": ObjectId(platform_id), @@ -49,8 +69,8 @@ def test_publish_on_success_check_notification_choreographies(): "icon": "111111111111111111111111", "name": "A guild", "selectedChannels": ["4455178"], - "window": [7, 1], - "action": [1, 1, 1, 4, 3, 5, 5, 4, 3, 3, 2, 2, 1], + "window": window, + "action": act_param, "period": datetime.now() - timedelta(days=10), }, "community": ObjectId("aabbccddeeff001122334455"), diff --git a/tests/integration/utils/analyzer_setup.py b/tests/integration/utils/analyzer_setup.py index 252aed1..f974134 100644 --- a/tests/integration/utils/analyzer_setup.py +++ b/tests/integration/utils/analyzer_setup.py @@ -1,13 +1,75 @@ import os +from datetime import datetime +from bson import ObjectId from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.rn_analyzer import RnDaoAnalyzer from dotenv import load_dotenv +from utils.get_mongo_client import MongoSingleton -def setup_analyzer() -> RnDaoAnalyzer: +def setup_analyzer( + guild_id: str, + platform_id: str, +) -> RnDaoAnalyzer: load_dotenv() - analyzer = RnDaoAnalyzer(community_id="1234555") + + client = 
MongoSingleton.get_instance().get_client() + + client["Core"].drop_collection("platforms") + + act_param = { + "INT_THR": 1, + "UW_DEG_THR": 1, + "PAUSED_T_THR": 1, + "CON_T_THR": 4, + "CON_O_THR": 3, + "EDGE_STR_THR": 5, + "UW_THR_DEG_THR": 5, + "VITAL_T_THR": 4, + "VITAL_O_THR": 3, + "STILL_T_THR": 2, + "STILL_O_THR": 2, + "DROP_H_THR": 2, + "DROP_I_THR": 1, + } + window = { + "period_size": 7, + "step_size": 1, + } + + client["Core"]["platforms"].insert_one( + { + "_id": ObjectId(platform_id), + "name": "discord", + "metadata": { + "id": guild_id, + "icon": "111111111111111111111111", + "name": "A guild", + "selectedChannels": [ + "11111111", + "22222222", + "33333333", + "44444444", + "55555555", + "66666666", + "77777777", + ], + "window": window, + "action": act_param, + "period": datetime(2023, 6, 1), + }, + "community": ObjectId("aabbccddeeff001122334455"), + "disconnectedAt": None, + "connectedAt": datetime(2023, 11, 1), + "isInProgress": True, + "createdAt": datetime(2023, 11, 1), + "updatedAt": datetime(2023, 11, 1), + "__v": 0, + } + ) + + analyzer = RnDaoAnalyzer(guild_id) user = os.getenv("MONGODB_USER", "") password = os.getenv("MONGODB_PASS", "") diff --git a/tests/integration/utils/mock_graph.py b/tests/integration/utils/mock_graph.py index d693d78..2473db0 100644 --- a/tests/integration/utils/mock_graph.py +++ b/tests/integration/utils/mock_graph.py @@ -80,7 +80,7 @@ def store_mock_data_in_neo4j(graph_dict, guildId, community_id): neo4j_creds["password"] = os.getenv("NEO4J_PASSWORD") neo4j_creds["user"] = os.getenv("NEO4J_USER") - analyzer = RnDaoAnalyzer(community_id) + analyzer = RnDaoAnalyzer(guildId) analyzer.set_mongo_database_info( mongo_db_host=host, @@ -93,16 +93,15 @@ def store_mock_data_in_neo4j(graph_dict, guildId, community_id): guilds_data = {} - guilds_data[guildId] = { - "heatmaps": None, - "memberactivities": ( - None, - graph_dict, - ), - } + guilds_data["heatmaps"] = None + guilds_data["memberactivities"] = ( + None, + graph_dict, + ) analyzer.DB_connections.store_analytics_data( analytics_data=guilds_data, + guild_id=guildId, community_id=community_id, remove_heatmaps=False, remove_memberactivities=False, diff --git a/tests/integration/utils/remove_and_setup_guild.py b/tests/integration/utils/remove_and_setup_guild.py index 885ef6d..4f1d15d 100644 --- a/tests/integration/utils/remove_and_setup_guild.py +++ b/tests/integration/utils/remove_and_setup_guild.py @@ -8,6 +8,7 @@ def setup_db_guild( db_access: DB_access, + platform_id: str, guildId: str = "1234", discordId_list: list[str] = ["973993299281076285"], discordId_isbot: list[bool] = [False], @@ -22,8 +23,6 @@ def setup_db_guild( `discordId_isbot` is representative if each user is bot or not """ - platform_id = "515151515151515151515151" - db_access.db_mongo_client["Core"]["platforms"].delete_one( {"_id": ObjectId(platform_id)} ) From a5d4c751b748ac9787b1561a425578cc8245646c Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Mon, 20 May 2024 18:08:15 +0330 Subject: [PATCH 23/48] fix: more test cases based on updates! 
---
 discord_analyzer/rn_analyzer.py | 8 +--
 tests/integration/test_all_active_period.py | 2 +-
 .../test_all_joined_day_members.py | 2 +-
 tests/integration/test_analyzer_init.py | 25 +++----
 ...iod_1year_recompute_available_analytics.py | 2 +-
 ..._period_1year_recompute_empty_analytics.py | 2 +-
 ...riod_1year_run_once_available_analytics.py | 2 +-
 ...r_period_1year_run_once_empty_analytics.py | 2 +-
 .../test_analyzer_period_35days_run_once.py | 2 +-
 ...od_3month_recompute_available_analytics.py | 2 +-
 ...period_3month_recompute_empty_analytics.py | 2 +-
 ...iod_3month_run_once_available_analytics.py | 2 +-
 ..._period_3month_run_once_empty_analytics.py | 2 +-
 ...od_6month_recompute_available_analytics.py | 2 +-
 ...period_6month_recompute_empty_analytics.py | 2 +-
 ...iod_6month_run_once_available_analytics.py | 2 +-
 ..._period_6month_run_once_empty_analytics.py | 2 +-
 ...iod_month_recompute_available_analytics.py | 2 +-
 ..._period_month_recompute_empty_analytics.py | 2 +-
 ...riod_month_run_once_available_analytics.py | 2 +-
 ...r_period_month_run_once_empty_analytics.py | 2 +-
 ...riod_week_recompute_available_analytics.py | 2 +-
 ...r_period_week_recompute_empty_analytics.py | 2 +-
 ...eriod_week_run_once_available_analytics.py | 2 +-
 ...er_period_week_run_once_empty_analytics.py | 2 +-
 .../test_assess_engagement_mention.py | 10 +---
 .../test_assess_engagement_reactions.py | 10 +---
 .../test_assess_engagement_replies.py | 10 +---
 tests/integration/test_exclude_bots.py | 2 +-
 ..._generated_graph_period_1_year_run_once.py | 14 +++--
 .../test_generated_graph_period_1year.py | 14 +++--
 .../test_generated_graph_period_35_days.py | 14 +++--
 ...generated_graph_period_35_days_run_once.py | 14 +++--
 .../test_generated_graph_period_3_months.py | 15 +++--
 ...enerated_graph_period_3_months_run_once.py | 14 +++--
 .../test_generated_graph_period_6_months.py | 14 +++--
 ...enerated_graph_period_6_months_run_once.py | 14 +++--
 .../test_get_guild_community_ids.py | 3 +-
 ...est_member_activities_action_all_active.py | 12 ++--
 ...member_activities_action_all_activities.py | 4 +-
 ...member_activity_from_start_no_past_data.py | 2 +-
 ...rom_start_with_guild_heatmaps_available.py | 2 +-
 ...t_with_guild_memberactivities_available.py | 2 +-
 ...r_activity_from_start_with_one_interval.py | 2 +-
 .../integration/test_member_activity_utils.py | 2 +-
 .../test_memberactivities_mentions.py | 4 +-
 .../test_memberactivities_reaction.py | 4 +-
 .../test_memberactivities_reply.py | 4 +-
 ...t_mentioned_active_members_from_message.py | 2 +-
 tests/integration/utils/analyzer_setup.py | 59 -------------------
 .../utils/remove_and_setup_guild.py | 4 +-
 tests/unit/test_example.py | 57 ------------------
 utils/get_guild_utils.py | 4 +-
 53 files changed, 150 insertions(+), 241 deletions(-)
 delete mode 100644 tests/unit/test_example.py

diff --git a/discord_analyzer/rn_analyzer.py b/discord_analyzer/rn_analyzer.py
index 8a503cb..742dfd4 100644
--- a/discord_analyzer/rn_analyzer.py
+++ b/discord_analyzer/rn_analyzer.py
@@ -10,7 +10,7 @@ class RnDaoAnalyzer(AnalyzerDBManager):
     """
     RnDaoAnalyzer
-    class that handles database connection and data analysis
+    class that handles database connections and data analysis
     """

     def __init__(self, guild_id: str, testing=False):
         """
         Class initiation function
         """
         """ Testing, prevents from data upload"""
+        logging.basicConfig()
+        logging.getLogger().setLevel(logging.INFO)
+
         self.testing = testing
         self.guild_object = Guild(guild_id)
         self.guild_id = guild_id
         self.community_id = self.guild_object.get_community_id()
-        logging.basicConfig()
-        logging.getLogger().setLevel(logging.INFO)
-
     def setup_neo4j_metrics(self) -> None:
         """
         setup the neo4j analytics wrapper
diff --git a/tests/integration/test_all_active_period.py b/tests/integration/test_all_active_period.py
index 32dc540..eed5a55 100644
--- a/tests/integration/test_all_active_period.py
+++ b/tests/integration/test_all_active_period.py
@@ -117,7 +117,7 @@ def test_two_weeks_period_active_members():
     db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples)
-    analyzer = setup_analyzer(guildId, platform_id)
+    analyzer = setup_analyzer(guildId)
     analyzer.run_once()
     memberactivities_cursor = db_access.query_db_find(
diff --git a/tests/integration/test_all_joined_day_members.py b/tests/integration/test_all_joined_day_members.py
index 45dae32..9b21c2c 100644
--- a/tests/integration/test_all_joined_day_members.py
+++ b/tests/integration/test_all_joined_day_members.py
@@ -63,7 +63,7 @@ def test_all_joined_day_members():
     db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples)
-    analyzer = setup_analyzer(guildId, platform_id)
+    analyzer = setup_analyzer(guildId)
     analyzer.run_once()
     cursor = db_access.db_mongo_client[guildId]["memberactivities"].find([])
diff --git a/tests/integration/test_analyzer_init.py b/tests/integration/test_analyzer_init.py
index 5c9ee06..c3ddd77 100644
--- a/tests/integration/test_analyzer_init.py
+++ b/tests/integration/test_analyzer_init.py
@@ -2,27 +2,16 @@
 from analyzer_init import AnalyzerInit
 from bson.objectid import ObjectId
-from pymongo import MongoClient
-from utils.daolytics_uitls import get_mongo_credentials
+from utils.get_mongo_client import MongoSingleton

 def test_analyzer_init():
-    community_id = "aabbccddeeff001122334455"
-
-    guildId = "1234"
-    analyzer = AnalyzerInit(guildId)
     platform_id = "515151515151515151515151"
     days_ago_period = 30
-    mongo_creds = get_mongo_credentials()
-    user = mongo_creds["user"]
-    password = mongo_creds["password"]
-    host = mongo_creds["host"]
-    port = mongo_creds["port"]
-
-    url = f"mongodb://{user}:{password}@{host}:{port}"
-
-    mongo_client: MongoClient = MongoClient(url)
+    community_id = "aabbccddeeff001122334455"
+    guildId = "1234"
+    mongo_client = MongoSingleton.get_instance().get_client()
     mongo_client["Core"]["platforms"].delete_one({"metadata.id": guildId})
     mongo_client.drop_database(guildId)
@@ -68,6 +57,8 @@ def test_analyzer_init():
         }
     )
+    analyzer = AnalyzerInit(guildId)
+
     mongo_client[guildId]["guildmembers"].insert_one(
         {
             "discordId": "user1",
@@ -107,9 +98,9 @@ def test_analyzer_init():
     mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples)
-    tc_discord_analyzer, mongo_creds = analyzer.get_analyzer()
+    tc_discord_analyzer, _ = analyzer.get_analyzer()
-    tc_discord_analyzer.recompute_analytics(guildId)
+    tc_discord_analyzer.recompute_analytics()
     heatmaps_data = mongo_client[guildId]["heatmaps"].find_one({})
     assert heatmaps_data is not None
diff --git a/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py b/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py
index 3b26751..3454fb2 100644
--- a/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py
+++ b/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py
@@ -76,7 +76,7 @@ def test_analyzer_one_year_period_recompute_available_analytics():
     db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples)
-    analyzer
= setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py index fff0adc..0c086fe 100644 --- a/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py @@ -56,7 +56,7 @@ def test_analyzer_one_year_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py b/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py index efeb209..1c83b85 100644 --- a/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py @@ -76,7 +76,7 @@ def test_analyzer_one_year_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py index 3dadfa3..c8865a0 100644 --- a/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py @@ -56,7 +56,7 @@ def test_analyzer_one_year_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_35days_run_once.py b/tests/integration/test_analyzer_period_35days_run_once.py index 72c455d..5fa4ca8 100644 --- a/tests/integration/test_analyzer_period_35days_run_once.py +++ b/tests/integration/test_analyzer_period_35days_run_once.py @@ -79,7 +79,7 @@ def test_analyzer_40days_period_run_once_available_analytics_overlapping_period( db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py index 6ed037e..0853632 100644 --- a/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py @@ -76,7 +76,7 @@ def test_analyzer_three_month_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git 
a/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py index 3ea7903..6820ab2 100644 --- a/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py @@ -56,7 +56,7 @@ def test_analyzer_three_month_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py index 46c43c5..0fbea02 100644 --- a/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py @@ -76,7 +76,7 @@ def test_analyzer_three_month_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py index e5f8fd6..c5b9131 100644 --- a/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py @@ -56,7 +56,7 @@ def test_analyzer_three_month_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py index 623f210..91837ed 100644 --- a/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py @@ -76,7 +76,7 @@ def test_analyzer_six_month_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py index 16415e7..20ddf02 100644 --- a/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py @@ -56,7 +56,7 @@ def test_analyzer_six_month_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py 
b/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py index d8703d4..0f8ffbc 100644 --- a/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py @@ -76,7 +76,7 @@ def test_analyzer_six_month_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py index a2aa516..8feb260 100644 --- a/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py @@ -56,7 +56,7 @@ def test_analyzer_six_month_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_month_recompute_available_analytics.py index 03a10da..b32902e 100644 --- a/tests/integration/test_analyzer_period_month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_month_recompute_available_analytics.py @@ -75,7 +75,7 @@ def test_analyzer_month_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py index dd41400..1cf6e2d 100644 --- a/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py @@ -55,7 +55,7 @@ def test_analyzer_month_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_month_run_once_available_analytics.py index 2f71a80..8cc38a8 100644 --- a/tests/integration/test_analyzer_period_month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_month_run_once_available_analytics.py @@ -75,7 +75,7 @@ def test_analyzer_month_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py index d753c88..a135f0c 100644 --- 
a/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py @@ -55,7 +55,7 @@ def test_analyzer_month_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_week_recompute_available_analytics.py b/tests/integration/test_analyzer_period_week_recompute_available_analytics.py index 7c2b723..42bbf53 100644 --- a/tests/integration/test_analyzer_period_week_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_week_recompute_available_analytics.py @@ -71,7 +71,7 @@ def test_analyzer_week_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.query_db_find("memberactivities", {}) diff --git a/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py index 1bc3691..13d4394 100644 --- a/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py @@ -55,7 +55,7 @@ def test_analyzer_week_period_recompute_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_cursor = db_access.db_mongo_client[guildId][ diff --git a/tests/integration/test_analyzer_period_week_run_once_available_analytics.py b/tests/integration/test_analyzer_period_week_run_once_available_analytics.py index bc5b726..57ed9f9 100644 --- a/tests/integration/test_analyzer_period_week_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_week_run_once_available_analytics.py @@ -70,7 +70,7 @@ def test_analyzer_week_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py index 178a4e8..db07be5 100644 --- a/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py @@ -54,7 +54,7 @@ def test_analyzer_week_period_run_once_empty_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py index 24a16f4..005890a 100644 --- a/tests/integration/test_assess_engagement_mention.py +++ b/tests/integration/test_assess_engagement_mention.py @@ -38,14 +38,10 @@ def heatmaps_analytics(self): heatmaps = 
Heatmaps(DB_connections=self.db_connections, testing=False) heatmaps_data = heatmaps.analysis_heatmap(guildId=self.guildId, from_start=True) analytics_data = {} - analytics_data[f"{self.guildId}"] = { - "heatmaps": heatmaps_data, - "memberactivities": ( - None, - None, - ), - } + analytics_data["heatmaps"] = heatmaps_data + analytics_data["memberactivities"] = (None, None) self.db_connections.store_analytics_data( + guild_id=self.guildId, analytics_data=analytics_data, community_id="123", remove_memberactivities=False, diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py index 2ca9a9a..3a54342 100644 --- a/tests/integration/test_assess_engagement_reactions.py +++ b/tests/integration/test_assess_engagement_reactions.py @@ -38,14 +38,10 @@ def heatmaps_analytics(self): heatmaps = Heatmaps(DB_connections=self.db_connections, testing=False) heatmaps_data = heatmaps.analysis_heatmap(guildId=self.guildId, from_start=True) analytics_data = {} - analytics_data[f"{self.guildId}"] = { - "heatmaps": heatmaps_data, - "memberactivities": ( - None, - None, - ), - } + analytics_data["heatmaps"] = heatmaps_data + analytics_data["memberactivities"] = (None, None) self.db_connections.store_analytics_data( + guild_id=self.guildId, analytics_data=analytics_data, community_id="123", remove_memberactivities=False, diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py index f59ecf3..8f6e4c9 100644 --- a/tests/integration/test_assess_engagement_replies.py +++ b/tests/integration/test_assess_engagement_replies.py @@ -38,14 +38,10 @@ def heatmaps_analytics(self): heatmaps = Heatmaps(DB_connections=self.db_connections, testing=False) heatmaps_data = heatmaps.analysis_heatmap(guildId=self.guildId, from_start=True) analytics_data = {} - analytics_data[f"{self.guildId}"] = { - "heatmaps": heatmaps_data, - "memberactivities": ( - None, - None, - ), - } + analytics_data["heatmaps"] = heatmaps_data + analytics_data["memberactivities"] = (None, None) self.db_connections.store_analytics_data( + guild_id=self.guildId, analytics_data=analytics_data, community_id="123", remove_memberactivities=False, diff --git a/tests/integration/test_exclude_bots.py b/tests/integration/test_exclude_bots.py index 8ed858a..e2c606b 100644 --- a/tests/integration/test_exclude_bots.py +++ b/tests/integration/test_exclude_bots.py @@ -67,7 +67,7 @@ def test_excluding_bots_heatmaps(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() db_access.db_mongo_client[guildId] diff --git a/tests/integration/test_generated_graph_period_1_year_run_once.py b/tests/integration/test_generated_graph_period_1_year_run_once.py index 4586712..875616c 100644 --- a/tests/integration/test_generated_graph_period_1_year_run_once.py +++ b/tests/integration/test_generated_graph_period_1_year_run_once.py @@ -17,6 +17,7 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -33,7 +34,12 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + db_access, + platform_id, + guildId, + 
discordId_list=acc_id, + days_ago_period=360, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +90,7 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() results = neo4j_ops.gds.run_cypher( @@ -114,9 +120,9 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_1year.py b/tests/integration/test_generated_graph_period_1year.py index 330b326..db62ac2 100644 --- a/tests/integration/test_generated_graph_period_1year.py +++ b/tests/integration/test_generated_graph_period_1year.py @@ -17,6 +17,7 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -33,7 +34,12 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=360 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=360, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +90,7 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( @@ -112,9 +118,9 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_35_days.py b/tests/integration/test_generated_graph_period_35_days.py index 94f16c7..f05709c 100644 --- a/tests/integration/test_generated_graph_period_35_days.py +++ b/tests/integration/test_generated_graph_period_35_days.py @@ -18,6 +18,7 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): # first create the collections guildId = "1234" platform_id = "515151515151515151515151" + community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -33,7 +34,12 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=35 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=35, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +90,7 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): 
db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( @@ -112,9 +118,9 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_35_days_run_once.py b/tests/integration/test_generated_graph_period_35_days_run_once.py index b775551..41b8f62 100644 --- a/tests/integration/test_generated_graph_period_35_days_run_once.py +++ b/tests/integration/test_generated_graph_period_35_days_run_once.py @@ -18,6 +18,7 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): # first create the collections guildId = "1234" platform_id = "515151515151515151515151" + community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -33,7 +34,12 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=35 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=35, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +90,7 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() results = neo4j_ops.gds.run_cypher( @@ -113,9 +119,9 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_3_months.py b/tests/integration/test_generated_graph_period_3_months.py index 2642116..bd170e7 100644 --- a/tests/integration/test_generated_graph_period_3_months.py +++ b/tests/integration/test_generated_graph_period_3_months.py @@ -17,6 +17,7 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): """ # first create the collections guildId = "1234" + community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -31,9 +32,13 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): "973993299281076285", "973993299281076286", ] - setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=90, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +89,7 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) 
analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( @@ -112,9 +117,9 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_3_months_run_once.py b/tests/integration/test_generated_graph_period_3_months_run_once.py index 5c242a6..5e4b134 100644 --- a/tests/integration/test_generated_graph_period_3_months_run_once.py +++ b/tests/integration/test_generated_graph_period_3_months_run_once.py @@ -18,6 +18,7 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): # first create the collections guildId = "1234" platform_id = "515151515151515151515151" + community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -33,7 +34,12 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=90 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=90, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +90,7 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() results = neo4j_ops.gds.run_cypher( @@ -113,9 +119,9 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_6_months.py b/tests/integration/test_generated_graph_period_6_months.py index 8736514..01ee33c 100644 --- a/tests/integration/test_generated_graph_period_6_months.py +++ b/tests/integration/test_generated_graph_period_6_months.py @@ -18,6 +18,7 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): # first create the collections guildId = "1234" platform_id = "515151515151515151515151" + community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) neo4j_ops = neo4j_setup() @@ -33,7 +34,12 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=180, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -84,7 +90,7 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() results = neo4j_ops.gds.run_cypher( @@ -112,9 +118,9 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): f""" MATCH (g:Guild 
{{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_generated_graph_period_6_months_run_once.py b/tests/integration/test_generated_graph_period_6_months_run_once.py index f0e1daf..e76f635 100644 --- a/tests/integration/test_generated_graph_period_6_months_run_once.py +++ b/tests/integration/test_generated_graph_period_6_months_run_once.py @@ -17,6 +17,7 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): """ # first create the collections guildId = "1234" + community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) @@ -34,7 +35,12 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): ] setup_db_guild( - db_access, platform_id, guildId, discordId_list=acc_id, days_ago_period=180 + db_access, + platform_id, + guildId, + discordId_list=acc_id, + days_ago_period=90, + community_id=community_id, ) db_access.db_mongo_client[guildId].create_collection("heatmaps") @@ -85,7 +91,7 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() results = neo4j_ops.gds.run_cypher( @@ -115,9 +121,9 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): f""" MATCH (g:Guild {{guildId: '{guildId}'}}) - -[r:IS_WITHIN]-> (c:Community {{id: '1234555'}}) + -[r:IS_WITHIN]-> (c:Community {{id: '{community_id}'}}) RETURN c.id as cid """ ) assert len(results.values) == 1 - assert results["cid"].values == ["1234555"] + assert results["cid"].values == [community_id] diff --git a/tests/integration/test_get_guild_community_ids.py b/tests/integration/test_get_guild_community_ids.py index 69e8a7c..0b6fe44 100644 --- a/tests/integration/test_get_guild_community_ids.py +++ b/tests/integration/test_get_guild_community_ids.py @@ -41,9 +41,8 @@ def test_get_avalable_guild(self): } ) - guild_id, community_id = get_guild_community_ids(str(platform_id)) + guild_id = get_guild_community_ids(str(platform_id)) self.assertEqual(guild_id, "999888877766655") - self.assertEqual(community_id, "aabbccddeeff001122334455") def test_no_document_raise_error(self): client = MongoSingleton.get_instance().client diff --git a/tests/integration/test_member_activities_action_all_active.py b/tests/integration/test_member_activities_action_all_active.py index e63031f..0a791d7 100644 --- a/tests/integration/test_member_activities_action_all_active.py +++ b/tests/integration/test_member_activities_action_all_active.py @@ -46,8 +46,8 @@ def test_single_user_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, {"_id": 0, "all_active": 1} ) @@ -96,8 +96,8 @@ def test_lone_msg_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - 
analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, {"_id": 0, "all_active": 1} ) @@ -145,8 +145,8 @@ def test_thr_message_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, {"_id": 0, "all_active": 1, "date": 1} ) diff --git a/tests/integration/test_member_activities_action_all_activities.py b/tests/integration/test_member_activities_action_all_activities.py index 6ad3c0f..51b6dec 100644 --- a/tests/integration/test_member_activities_action_all_activities.py +++ b/tests/integration/test_member_activities_action_all_activities.py @@ -66,8 +66,8 @@ def test_single_user_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, { diff --git a/tests/integration/test_member_activity_from_start_no_past_data.py b/tests/integration/test_member_activity_from_start_no_past_data.py index 8c46931..2c886da 100644 --- a/tests/integration/test_member_activity_from_start_no_past_data.py +++ b/tests/integration/test_member_activity_from_start_no_past_data.py @@ -94,7 +94,7 @@ def test_analyzer_member_activities_from_start_empty_memberactivities(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ diff --git a/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py b/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py index 22e336e..e237bae 100644 --- a/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py +++ b/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py @@ -59,7 +59,7 @@ def test_analyzer_member_activities_from_start_available_heatmaps(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ diff --git a/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py b/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py index 6cfac87..21a614b 100644 --- a/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py +++ b/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py @@ -53,7 +53,7 @@ def test_analyzer_member_activities_from_start_available_member_activity(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_data = 
db_access.db_mongo_client[guildId][ diff --git a/tests/integration/test_member_activity_from_start_with_one_interval.py b/tests/integration/test_member_activity_from_start_with_one_interval.py index c066701..1a35c2b 100644 --- a/tests/integration/test_member_activity_from_start_with_one_interval.py +++ b/tests/integration/test_member_activity_from_start_with_one_interval.py @@ -44,7 +44,7 @@ def test_analyzer_from_start_one_interval(): db_access.db_mongo_client[guildId].create_collection("heatmaps") db_access.db_mongo_client[guildId].create_collection("memberactivities") - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.recompute_analytics() memberactivities_data = db_access.db_mongo_client[guildId][ diff --git a/tests/integration/test_member_activity_utils.py b/tests/integration/test_member_activity_utils.py index f6b8822..18edc9c 100644 --- a/tests/integration/test_member_activity_utils.py +++ b/tests/integration/test_member_activity_utils.py @@ -7,7 +7,7 @@ def test_utils_get_members(): - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) platform_id = "515151515151515151515151" guildId = "1012430565959553145" db_access = launch_db_access(guildId) diff --git a/tests/integration/test_memberactivities_mentions.py b/tests/integration/test_memberactivities_mentions.py index c30efed..f31f816 100644 --- a/tests/integration/test_memberactivities_mentions.py +++ b/tests/integration/test_memberactivities_mentions.py @@ -63,8 +63,8 @@ def test_single_user_interaction(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, { diff --git a/tests/integration/test_memberactivities_reaction.py b/tests/integration/test_memberactivities_reaction.py index ff94b19..e4b95db 100644 --- a/tests/integration/test_memberactivities_reaction.py +++ b/tests/integration/test_memberactivities_reaction.py @@ -66,8 +66,8 @@ def test_single_user_action(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, { diff --git a/tests/integration/test_memberactivities_reply.py b/tests/integration/test_memberactivities_reply.py index 62d0b0a..e18c0f0 100644 --- a/tests/integration/test_memberactivities_reply.py +++ b/tests/integration/test_memberactivities_reply.py @@ -63,8 +63,8 @@ def test_single_user_interaction(self): self.db_access.db_mongo_client[self.guildId]["rawinfos"].insert_many( rawinfo_samples ) - analyzer = setup_analyzer(self.guildId, platform_id) - analyzer.recompute_analytics(self.guildId) + analyzer = setup_analyzer(self.guildId) + analyzer.recompute_analytics() cursor = self.db_access.db_mongo_client[self.guildId]["memberactivities"].find( {}, { diff --git a/tests/integration/test_mentioned_active_members_from_message.py b/tests/integration/test_mentioned_active_members_from_message.py index 6a6543a..f14db77 100644 --- a/tests/integration/test_mentioned_active_members_from_message.py +++ 
b/tests/integration/test_mentioned_active_members_from_message.py @@ -52,7 +52,7 @@ def test_mention_active_members_from_rawinfo(): db_access.db_mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - analyzer = setup_analyzer(guildId, platform_id) + analyzer = setup_analyzer(guildId) analyzer.run_once() memberactivities_cursor = db_access.query_db_find( diff --git a/tests/integration/utils/analyzer_setup.py b/tests/integration/utils/analyzer_setup.py index f974134..f7d3d09 100644 --- a/tests/integration/utils/analyzer_setup.py +++ b/tests/integration/utils/analyzer_setup.py @@ -1,74 +1,15 @@ import os -from datetime import datetime -from bson import ObjectId from discord_analyzer.DB_operations.mongodb_access import DB_access from discord_analyzer.rn_analyzer import RnDaoAnalyzer from dotenv import load_dotenv -from utils.get_mongo_client import MongoSingleton def setup_analyzer( guild_id: str, - platform_id: str, ) -> RnDaoAnalyzer: load_dotenv() - client = MongoSingleton.get_instance().get_client() - - client["Core"].drop_collection("platforms") - - act_param = { - "INT_THR": 1, - "UW_DEG_THR": 1, - "PAUSED_T_THR": 1, - "CON_T_THR": 4, - "CON_O_THR": 3, - "EDGE_STR_THR": 5, - "UW_THR_DEG_THR": 5, - "VITAL_T_THR": 4, - "VITAL_O_THR": 3, - "STILL_T_THR": 2, - "STILL_O_THR": 2, - "DROP_H_THR": 2, - "DROP_I_THR": 1, - } - window = { - "period_size": 7, - "step_size": 1, - } - - client["Core"]["platforms"].insert_one( - { - "_id": ObjectId(platform_id), - "name": "discord", - "metadata": { - "id": guild_id, - "icon": "111111111111111111111111", - "name": "A guild", - "selectedChannels": [ - "11111111", - "22222222", - "33333333", - "44444444", - "55555555", - "66666666", - "77777777", - ], - "window": window, - "action": act_param, - "period": datetime(2023, 6, 1), - }, - "community": ObjectId("aabbccddeeff001122334455"), - "disconnectedAt": None, - "connectedAt": datetime(2023, 11, 1), - "isInProgress": True, - "createdAt": datetime(2023, 11, 1), - "updatedAt": datetime(2023, 11, 1), - "__v": 0, - } - ) - analyzer = RnDaoAnalyzer(guild_id) user = os.getenv("MONGODB_USER", "") diff --git a/tests/integration/utils/remove_and_setup_guild.py b/tests/integration/utils/remove_and_setup_guild.py index 4f1d15d..20105cd 100644 --- a/tests/integration/utils/remove_and_setup_guild.py +++ b/tests/integration/utils/remove_and_setup_guild.py @@ -22,7 +22,9 @@ def setup_db_guild( it then create the guildmembers collection in it `discordId_isbot` is representative if each user is bot or not + `community_id` can be passed in kwargs. 
default is `aabbccddeeff001122334455` """ + community_id = kwargs.get("community_id", "aabbccddeeff001122334455") db_access.db_mongo_client["Core"]["platforms"].delete_one( {"_id": ObjectId(platform_id)} ) @@ -60,7 +62,7 @@ def setup_db_guild( "action": action, "period": datetime.now() - timedelta(days=days_ago_period), }, - "community": ObjectId("aabbccddeeff001122334455"), + "community": ObjectId(community_id), "disconnectedAt": None, "connectedAt": (datetime.now() - timedelta(days=days_ago_period + 10)), "isInProgress": True, diff --git a/tests/unit/test_example.py b/tests/unit/test_example.py deleted file mode 100644 index 1af8c0d..0000000 --- a/tests/unit/test_example.py +++ /dev/null @@ -1,57 +0,0 @@ -import os - -from discord_analyzer import RnDaoAnalyzer -from dotenv import load_dotenv - - -def test_mongo_db_info_set(): - community_id = "4321" - analyzer = RnDaoAnalyzer(community_id) - load_dotenv() - - port = 1234 - host = "http://www.google.com" - # to ignore gitleaks - password = os.getenv("MONGODB_PASS") - user = "sample_user" - - analyzer.set_mongo_database_info( - mongo_db_host=host, - mongo_db_password=password, - mongo_db_user=user, - mongo_db_port=port, - ) - assert analyzer.mongo_host == host - assert analyzer.mongo_pass == password - assert analyzer.mongo_user == user - assert analyzer.mongo_port == port - - -def test_neo4j_db_info_set(): - load_dotenv() - port = 1234 - db_name = "db" - protocol = "bolt" - user = "user" - host = "localhost" - # to ignore gitleaks - password = os.getenv("NEO4J_PASSWORD") - neo4j_creds = { - "db_name": db_name, - "password": password, - "port": port, - "protocol": protocol, - "host": host, - "user": user, - } - - community_id = "4321" - analyzer = RnDaoAnalyzer(community_id) - analyzer.set_neo4j_database_info(neo4j_creds=neo4j_creds) - - assert analyzer.neo4j_port == port - assert analyzer.neo4j_host == host - assert analyzer.neo4j_protocol == protocol - assert analyzer.neo4j_db_name == db_name - assert analyzer.neo4j_password == password - assert analyzer.neo4j_user == user diff --git a/utils/get_guild_utils.py b/utils/get_guild_utils.py index 728a7ce..88cd3ed 100644 --- a/utils/get_guild_utils.py +++ b/utils/get_guild_utils.py @@ -2,7 +2,7 @@ from utils.get_mongo_client import MongoSingleton -def get_guild_community_ids(platform_id: str) -> tuple[str, str]: +def get_guild_community_ids(platform_id: str) -> str: """ get both the guild id and community from the platform id @@ -15,8 +15,6 @@ def get_guild_community_ids(platform_id: str) -> tuple[str, str]: -------- guild_id : str the discord guild id for that specific platform - community_id : str - the community id that the guild is related """ mongo_client = MongoSingleton.get_instance().client From 4ff3cf175bdfe723e791f1e9950425d3f73b46b5 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 09:33:26 +0330 Subject: [PATCH 24/48] fix: test case! the guild platform had to be added to db before running the script and now it is done. 
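
note: setup_analyzer() no longer writes the platform document itself (that
insert was dropped from tests/integration/utils/analyzer_setup.py earlier in
this series), so the test has to seed the platform record via setup_db_guild()
before the analyzer is built. A minimal ordering sketch, reusing only the
helpers this test already imports; the keyword arguments shown are
illustrative and may differ per test:

    # hypothetical ordering sketch -- seed the platform document first,
    # then build the analyzer that will read it
    db_access = launch_db_access(guildId)
    setup_db_guild(
        db_access, platform_id, guildId, discordId_list=users, days_ago_period=7
    )
    analyzer = setup_analyzer(guildId)
    # ... the test can now run or recompute analytics safely
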
--- tests/integration/test_member_activity_utils.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_member_activity_utils.py b/tests/integration/test_member_activity_utils.py index 18edc9c..5023847 100644 --- a/tests/integration/test_member_activity_utils.py +++ b/tests/integration/test_member_activity_utils.py @@ -7,11 +7,14 @@ def test_utils_get_members(): - analyzer = setup_analyzer(guildId) platform_id = "515151515151515151515151" guildId = "1012430565959553145" - db_access = launch_db_access(guildId) users = ["973993299281076285"] + db_access = launch_db_access(guildId) + setup_db_guild( + db_access, platform_id, guildId, discordId_list=users, days_ago_period=7 + ) + analyzer = setup_analyzer(guildId) setup_db_guild(db_access, platform_id, guildId, discordId_list=users) From 68760e95d9a801fa6b65ed06b759d20500f27acc Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 09:41:53 +0330 Subject: [PATCH 25/48] fix: isort linter issue! --- discord_analyzer/rn_analyzer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/discord_analyzer/rn_analyzer.py b/discord_analyzer/rn_analyzer.py index 742dfd4..f4a519f 100644 --- a/discord_analyzer/rn_analyzer.py +++ b/discord_analyzer/rn_analyzer.py @@ -2,8 +2,8 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.analyzer_memberactivities import MemberActivities -from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics +from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from discord_analyzer.analyzer.utils.guild import Guild From 11049d87134c2f264a33bdf96f0d3201c8029912 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 15:18:08 +0330 Subject: [PATCH 26/48] fix: more cleaning for redis, and mongo clients! 
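
note: the new utils/rabbitmq.py and utils/redis.py wrappers are assumed to
follow the same singleton pattern already used by MongoSingleton, so callers
can do RedisSingleton.get_instance().get_client() instead of assembling
credentials by hand. Their contents are not shown in this excerpt; the sketch
below is only a guess at the Redis side, reusing the credential keys the old
server.py built inline (get_redis_credentials and the "host"/"port"/"pass"
keys are assumptions carried over from the previous code):

    from redis import Redis

    from utils.credentials import get_redis_credentials


    class RedisSingleton:
        # single shared instance, mirroring the MongoSingleton pattern (assumed)
        __instance = None

        def __init__(self) -> None:
            if RedisSingleton.__instance is not None:
                raise Exception("This class is a singleton!")
            creds = get_redis_credentials()
            # same connection parameters the old server.py passed to Redis()
            self.client = Redis(
                host=creds["host"],
                port=creds["port"],
                password=creds["pass"],
            )
            RedisSingleton.__instance = self

        @classmethod
        def get_instance(cls) -> "RedisSingleton":
            if cls.__instance is None:
                RedisSingleton()
            return cls.__instance

        def get_client(self) -> Redis:
            return self.client
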
--- analyzer_init.py | 13 +--- automation/utils/automation_base.py | 9 ++- automation/utils/model.py | 2 +- discord_analyzer/analyzer/utils/guild.py | 2 +- discord_utils.py | 64 ++++++------------- requirements.txt | 8 +-- server.py | 39 +++-------- tests/integration/test_analyzer_init.py | 4 +- .../test_assess_engagement_mention.py | 2 +- .../test_assess_engagement_reactions.py | 2 +- .../test_assess_engagement_replies.py | 2 +- .../test_automation_db_load_from_db.py | 2 +- .../test_automation_db_save_to_db.py | 2 +- .../test_get_guild_community_ids.py | 2 +- .../integration/test_get_guild_platform_id.py | 2 +- tests/integration/test_publish_on_success.py | 2 +- .../test_rawinfo_webhook_fetching.py | 2 +- tests/integration/test_service_connection.py | 2 +- tests/unit/test_creds_loading.py | 18 +----- tests/unit/test_mongo_singleton.py | 2 +- utils/{daolytics_uitls.py => credentials.py} | 14 ---- utils/get_guild_utils.py | 2 +- utils/get_rabbitmq.py | 24 ------- utils/{get_mongo_client.py => mongo.py} | 2 +- utils/rabbitmq.py | 42 ++++++++++++ utils/redis.py | 39 +++++++++++ utils/sentryio_service.py | 9 +-- worker.py | 2 +- 28 files changed, 144 insertions(+), 171 deletions(-) rename utils/{daolytics_uitls.py => credentials.py} (91%) delete mode 100644 utils/get_rabbitmq.py rename utils/{get_mongo_client.py => mongo.py} (94%) create mode 100644 utils/rabbitmq.py create mode 100644 utils/redis.py diff --git a/analyzer_init.py b/analyzer_init.py index b93568d..081c9b3 100644 --- a/analyzer_init.py +++ b/analyzer_init.py @@ -1,10 +1,9 @@ from typing import Any from discord_analyzer import RnDaoAnalyzer -from utils.daolytics_uitls import ( +from utils.credentials import ( get_mongo_credentials, get_neo4j_credentials, - get_saga_db_location, ) @@ -17,23 +16,17 @@ class AnalyzerInit: def __init__(self, guild_id: str) -> None: self.guild_id = guild_id - def get_analyzer(self) -> tuple[RnDaoAnalyzer, dict[str, Any]]: + def get_analyzer(self) -> RnDaoAnalyzer: """ Returns: --------- analyzer : RnDaoAnalyzer - mongo_creds : dict[str, Any] """ analyzer = RnDaoAnalyzer(self.guild_id) # credentials mongo_creds = get_mongo_credentials() neo4j_creds = get_neo4j_credentials() - saga_mongo_location = get_saga_db_location() - - mongo_creds["db_name"] = saga_mongo_location["db_name"] - mongo_creds["collection_name"] = saga_mongo_location["collection_name"] - mongo_creds["connection_str"] = self._get_mongo_connection(mongo_creds) analyzer.set_mongo_database_info( mongo_db_host=mongo_creds["host"], @@ -45,7 +38,7 @@ def get_analyzer(self) -> tuple[RnDaoAnalyzer, dict[str, Any]]: analyzer.database_connect() analyzer.setup_neo4j_metrics() - return analyzer, mongo_creds + return analyzer def _get_mongo_connection(self, mongo_creds: dict[str, Any]): user = mongo_creds["user"] diff --git a/automation/utils/automation_base.py b/automation/utils/automation_base.py index 41e28f3..bccc619 100644 --- a/automation/utils/automation_base.py +++ b/automation/utils/automation_base.py @@ -2,8 +2,8 @@ from typing import Any from uuid import uuid1 -from utils.get_mongo_client import MongoSingleton -from utils.get_rabbitmq import prepare_rabbit_mq +from utils.mongo import MongoSingleton +from utils.rabbitmq import RabbitMQSingleton class AutomationBase: @@ -11,9 +11,8 @@ def __init__(self) -> None: """ utilities for automation workflow """ - mongo_singleton = MongoSingleton.get_instance() - self.mongo_client = mongo_singleton.get_client() - self.rabbitmq = prepare_rabbit_mq() + self.mongo_client = 
MongoSingleton.get_instance().get_client() + self.rabbitmq = RabbitMQSingleton.get_instance().get_client() def _get_users_from_guildmembers( self, guild_id: str, user_ids: list[str], strategy: str = "ngu" diff --git a/automation/utils/model.py b/automation/utils/model.py index 4456bc4..1f4dc09 100644 --- a/automation/utils/model.py +++ b/automation/utils/model.py @@ -1,5 +1,5 @@ from utils.get_automation_env import get_automations_env -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton from .interfaces import Automation diff --git a/discord_analyzer/analyzer/utils/guild.py b/discord_analyzer/analyzer/utils/guild.py index 30e03c6..5220a9d 100644 --- a/discord_analyzer/analyzer/utils/guild.py +++ b/discord_analyzer/analyzer/utils/guild.py @@ -1,4 +1,4 @@ -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton class Guild: diff --git a/discord_utils.py b/discord_utils.py index 6b2d357..e78f855 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -4,22 +4,15 @@ from analyzer_init import AnalyzerInit from automation.automation_workflow import AutomationWorkflow from tc_messageBroker.rabbit_mq.saga.saga_base import get_saga -from utils.daolytics_uitls import get_mongo_credentials, get_saga_db_location +from utils.credentials import get_mongo_credentials from utils.get_guild_utils import get_guild_community_ids -from utils.get_rabbitmq import prepare_rabbit_mq +from utils.rabbitmq import RabbitMQSingleton from utils.transactions_ordering import sort_transactions -def analyzer_recompute(sagaId: str, rabbit_creds: dict[str, Any]): - mongo_creds = get_mongo_credentials() - saga_mongo_location = get_saga_db_location() +def analyzer_recompute(sagaId: str): - saga = get_saga_instance( - sagaId=sagaId, - connection=mongo_creds["connection_str"], - saga_db=saga_mongo_location["db_name"], - saga_collection=saga_mongo_location["collection_name"], - ) + saga = get_saga_instance(sagaId=sagaId) if saga is None: logging.warn( f"Warn: Saga not found!, stopping the recompute for sagaId: {sagaId}" @@ -30,7 +23,7 @@ def analyzer_recompute(sagaId: str, rabbit_creds: dict[str, Any]): logging.info("Initializing the analyzer") analyzer_init = AnalyzerInit(guildId) - analyzer, mongo_creds = analyzer_init.get_analyzer() + analyzer = analyzer_init.get_analyzer() logging.info("Analyzer initialized") def recompute_wrapper(**kwargs): @@ -44,22 +37,12 @@ def publish_wrapper(**kwargs): saga.next( publish_method=publish_wrapper, call_function=recompute_wrapper, - mongo_creds=mongo_creds, ) - return rabbit_creds, sagaId, mongo_creds - - -def analyzer_run_once(sagaId: str, rabbit_creds: dict[str, Any]): - mongo_creds = get_mongo_credentials() - saga_mongo_location = get_saga_db_location() + return sagaId - saga = get_saga_instance( - sagaId=sagaId, - connection=mongo_creds["connection_str"], - saga_db=saga_mongo_location["db_name"], - saga_collection=saga_mongo_location["collection_name"], - ) +def analyzer_run_once(sagaId: str): + saga = get_saga_instance(sagaId=sagaId) if saga is None: logging.warn(f"Saga not found!, stopping the run_once for sagaId: {sagaId}") else: @@ -67,7 +50,7 @@ def analyzer_run_once(sagaId: str, rabbit_creds: dict[str, Any]): guildId = get_guild_community_ids(platform_id) analyzer_init = AnalyzerInit(guildId) - analyzer, mongo_creds = analyzer_init.get_analyzer() + analyzer = analyzer_init.get_analyzer() def run_once_wrapper(**kwargs): analyzer.run_once() @@ -78,38 +61,29 @@ def publish_wrapper(**kwargs): saga.next( 
publish_method=publish_wrapper, call_function=run_once_wrapper, - mongo_creds=mongo_creds, ) - return rabbit_creds, sagaId, mongo_creds + return sagaId -def get_saga_instance(sagaId: str, connection: str, saga_db: str, saga_collection: str): +def get_saga_instance(sagaId: str): + mongo_creds = get_mongo_credentials() + saga = get_saga( sagaId=sagaId, - connection_url=connection, - db_name=saga_db, - collection=saga_collection, + connection_url=mongo_creds["connection_str"], + db_name="Saga", + collection="sagas", ) return saga def publish_on_success(connection, result, *args, **kwargs): - # we must get these three things try: - # rabbitmq creds - # TODO: remove sending it in future - _ = args[0][0] - sagaId = args[0][1] - mongo_creds = args[0][2] + sagaId = args[0][0] logging.info(f"SAGAID: {sagaId}: ON_SUCCESS callback! ") - saga = get_saga_instance( - sagaId=sagaId, - connection=mongo_creds["connection_str"], - saga_db=mongo_creds["db_name"], - saga_collection=mongo_creds["collection_name"], - ) - rabbitmq = prepare_rabbit_mq() + saga = get_saga_instance(sagaId=sagaId) + rabbitmq = RabbitMQSingleton.get_instance().get_client() transactions = saga.choreography.transactions diff --git a/requirements.txt b/requirements.txt index 069bc46..26dc353 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ pymongo==4.3.3 pytest==7.2.0 python-dateutil==2.8.2 pytz==2022.7.1 -python-dotenv==0.21.1 +python-dotenv>=1.0.0 six==1.16.0 tomli==2.0.1 networkx==3.1 @@ -18,11 +18,11 @@ pytest-cov==4.0.0 coverage==7.2.5 python-dateutil==2.8.2 tqdm -tc-messageBroker==1.4.0 +tc-messageBroker==1.6.7 sentry-sdk rq redis -tc-core-analyzer-lib==1.3.0 -tc-neo4j-lib==1.0.0 +tc-core-analyzer-lib==1.3.1 +tc-neo4j-lib==1.0.2 pybars3 backoff==2.2.1 diff --git a/server.py b/server.py index 8e22e77..c1d8154 100644 --- a/server.py +++ b/server.py @@ -10,14 +10,10 @@ from pika.exceptions import AMQPConnectionError, ConnectionClosedByBroker from redis import Redis from rq import Queue as RQ_Queue -from tc_messageBroker.message_broker import RabbitMQ +from utils.rabbitmq import RabbitMQSingleton from tc_messageBroker.rabbit_mq.event import Event from tc_messageBroker.rabbit_mq.queue import Queue -from utils.daolytics_uitls import ( - get_rabbit_mq_credentials, - get_redis_credentials, - get_sentryio_service_creds, -) +from utils.redis import RedisSingleton from utils.sentryio_service import set_up_sentryio @@ -28,34 +24,19 @@ max_time=60 * 60 * 3, ) def analyzer(): - rabbit_mq_creds = get_rabbit_mq_credentials() - sentry_creds = get_sentryio_service_creds() - # sentryio service - set_up_sentryio(sentry_creds["dsn"], sentry_creds["env"]) - redis_creds = get_redis_credentials() - - rabbit_mq = RabbitMQ( - broker_url=rabbit_mq_creds["broker_url"], - port=rabbit_mq_creds["port"], - username=rabbit_mq_creds["username"], - password=rabbit_mq_creds["password"], - ) - - redis = Redis( - host=redis_creds["host"], - port=redis_creds["port"], - password=redis_creds["pass"], - ) + set_up_sentryio() + rabbit_mq = RabbitMQSingleton.get_instance().get_client() + redis = RedisSingleton.get_instance().get_client() # 24 hours equal to 86400 seconds rq_queue = RQ_Queue(connection=redis, default_timeout=86400) analyzer_recompute = functools.partial( - recompute_wrapper, redis_queue=rq_queue, rabbit_mq_creds=rabbit_mq_creds + recompute_wrapper, redis_queue=rq_queue ) analyzer_run_once = functools.partial( - run_once_wrapper, redis_queue=rq_queue, rabbit_mq_creds=rabbit_mq_creds + run_once_wrapper, redis_queue=rq_queue ) 
rabbit_mq.connect(Queue.DISCORD_ANALYZER, heartbeat=60) @@ -71,7 +52,7 @@ def analyzer(): def recompute_wrapper( - body: dict[str, Any], redis_queue: RQ_Queue, rabbit_mq_creds: dict[str, Any] + body: dict[str, Any], redis_queue: RQ_Queue ): sagaId = body["content"]["uuid"] logging.info(f"SAGAID:{sagaId} recompute job Adding to queue") @@ -79,20 +60,18 @@ def recompute_wrapper( redis_queue.enqueue( analyzer_recompute, sagaId=sagaId, - rabbit_creds=rabbit_mq_creds, on_success=publish_on_success, ) def run_once_wrapper( - body: dict[str, Any], redis_queue: RQ_Queue, rabbit_mq_creds: dict[str, Any] + body: dict[str, Any], redis_queue: RQ_Queue ): sagaId = body["content"]["uuid"] logging.info(f"SAGAID:{sagaId} run_once job Adding to queue") redis_queue.enqueue( analyzer_run_once, sagaId=sagaId, - rabbit_creds=rabbit_mq_creds, on_success=publish_on_success, ) diff --git a/tests/integration/test_analyzer_init.py b/tests/integration/test_analyzer_init.py index c3ddd77..8f9951e 100644 --- a/tests/integration/test_analyzer_init.py +++ b/tests/integration/test_analyzer_init.py @@ -2,7 +2,7 @@ from analyzer_init import AnalyzerInit from bson.objectid import ObjectId -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton def test_analyzer_init(): @@ -98,7 +98,7 @@ def test_analyzer_init(): mongo_client[guildId]["rawinfos"].insert_many(rawinfo_samples) - tc_discord_analyzer, _ = analyzer.get_analyzer() + tc_discord_analyzer = analyzer.get_analyzer() tc_discord_analyzer.recompute_analytics() diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py index 005890a..e97c35c 100644 --- a/tests/integration/test_assess_engagement_mention.py +++ b/tests/integration/test_assess_engagement_mention.py @@ -5,7 +5,7 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity -from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials, get_neo4j_credentials from .utils.analyzer_setup import launch_db_access from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py index 3a54342..cc79e92 100644 --- a/tests/integration/test_assess_engagement_reactions.py +++ b/tests/integration/test_assess_engagement_reactions.py @@ -5,7 +5,7 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity -from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials, get_neo4j_credentials from .utils.analyzer_setup import launch_db_access from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py index 8f6e4c9..d52c00a 100644 --- a/tests/integration/test_assess_engagement_replies.py +++ b/tests/integration/test_assess_engagement_replies.py @@ -5,7 +5,7 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity 
-from utils.daolytics_uitls import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials, get_neo4j_credentials from .utils.analyzer_setup import launch_db_access from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_automation_db_load_from_db.py b/tests/integration/test_automation_db_load_from_db.py index 213bd60..64fc96a 100644 --- a/tests/integration/test_automation_db_load_from_db.py +++ b/tests/integration/test_automation_db_load_from_db.py @@ -4,7 +4,7 @@ from automation.utils.model import AutomationDB from dotenv import load_dotenv -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton class TestAutomationDBLoadFromDB(unittest.TestCase): diff --git a/tests/integration/test_automation_db_save_to_db.py b/tests/integration/test_automation_db_save_to_db.py index dd3fa64..607e772 100644 --- a/tests/integration/test_automation_db_save_to_db.py +++ b/tests/integration/test_automation_db_save_to_db.py @@ -5,7 +5,7 @@ from automation.utils.interfaces import Automation from automation.utils.model import AutomationDB from dotenv import load_dotenv -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton class TestAutomationDBSaveToDB(unittest.TestCase): diff --git a/tests/integration/test_get_guild_community_ids.py b/tests/integration/test_get_guild_community_ids.py index 0b6fe44..c85168e 100644 --- a/tests/integration/test_get_guild_community_ids.py +++ b/tests/integration/test_get_guild_community_ids.py @@ -3,7 +3,7 @@ from bson.objectid import ObjectId from utils.get_guild_utils import get_guild_community_ids -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton class TestGetGuildId(TestCase): diff --git a/tests/integration/test_get_guild_platform_id.py b/tests/integration/test_get_guild_platform_id.py index c55c9b0..5542639 100644 --- a/tests/integration/test_get_guild_platform_id.py +++ b/tests/integration/test_get_guild_platform_id.py @@ -3,7 +3,7 @@ from bson.objectid import ObjectId from utils.get_guild_utils import get_guild_platform_id -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton class TestGetGuildId(TestCase): diff --git a/tests/integration/test_publish_on_success.py b/tests/integration/test_publish_on_success.py index de647e9..a8c5ccc 100644 --- a/tests/integration/test_publish_on_success.py +++ b/tests/integration/test_publish_on_success.py @@ -10,7 +10,7 @@ from bson.objectid import ObjectId from discord_utils import publish_on_success from dotenv import load_dotenv -from utils.daolytics_uitls import get_mongo_credentials +from utils.credentials import get_mongo_credentials from .utils.analyzer_setup import launch_db_access diff --git a/tests/integration/test_rawinfo_webhook_fetching.py b/tests/integration/test_rawinfo_webhook_fetching.py index 681cff9..d74e4ae 100644 --- a/tests/integration/test_rawinfo_webhook_fetching.py +++ b/tests/integration/test_rawinfo_webhook_fetching.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta from discord_analyzer.models.RawInfoModel import RawInfoModel -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton def test_rawinfo_get_day_entry_empty_data(): diff --git a/tests/integration/test_service_connection.py b/tests/integration/test_service_connection.py index 5e42f64..1d4c4e7 100644 --- a/tests/integration/test_service_connection.py +++ 
b/tests/integration/test_service_connection.py @@ -1,5 +1,5 @@ from tc_messageBroker.message_broker import RabbitMQ -from utils.daolytics_uitls import get_rabbit_mq_credentials +from utils.credentials import get_rabbit_mq_credentials def test_rabbit_mq_connect(): diff --git a/tests/unit/test_creds_loading.py b/tests/unit/test_creds_loading.py index 2819fb7..f78a866 100644 --- a/tests/unit/test_creds_loading.py +++ b/tests/unit/test_creds_loading.py @@ -1,9 +1,8 @@ -from utils.daolytics_uitls import ( +from utils.credentials import ( get_mongo_credentials, get_neo4j_credentials, get_rabbit_mq_credentials, get_redis_credentials, - get_saga_db_location, get_sentryio_service_creds, ) @@ -92,21 +91,6 @@ def test_redis_creds_values(): assert redis_creds["port"] is not None assert redis_creds["host"] is not None - -def test_saga_location(): - saga_creds = get_saga_db_location() - - assert "db_name" in saga_creds.keys() - assert "collection_name" in saga_creds.keys() - - -def test_saga_location_values(): - saga_creds = get_saga_db_location() - - assert saga_creds["db_name"] is not None - assert saga_creds["collection_name"] is not None - - def test_sentryio_creds(): sentry_creds = get_sentryio_service_creds() diff --git a/tests/unit/test_mongo_singleton.py b/tests/unit/test_mongo_singleton.py index 518c02e..9734592 100644 --- a/tests/unit/test_mongo_singleton.py +++ b/tests/unit/test_mongo_singleton.py @@ -1,7 +1,7 @@ import unittest from pymongo import MongoClient -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton class TestMongoSingleton(unittest.TestCase): diff --git a/utils/daolytics_uitls.py b/utils/credentials.py similarity index 91% rename from utils/daolytics_uitls.py rename to utils/credentials.py index 01df88d..74b1d0a 100644 --- a/utils/daolytics_uitls.py +++ b/utils/credentials.py @@ -92,20 +92,6 @@ def get_neo4j_credentials(): return neo4j_creds -def get_saga_db_location(): - """ - get the saga location in database - """ - load_dotenv() - - saga_db = {} - - saga_db["db_name"] = os.getenv("SAGA_DB_NAME") - saga_db["collection_name"] = os.getenv("SAGA_DB_COLLECTION") - - return saga_db - - def get_sentryio_service_creds(): load_dotenv() diff --git a/utils/get_guild_utils.py b/utils/get_guild_utils.py index 88cd3ed..34f16c8 100644 --- a/utils/get_guild_utils.py +++ b/utils/get_guild_utils.py @@ -1,5 +1,5 @@ from bson.objectid import ObjectId -from utils.get_mongo_client import MongoSingleton +from utils.mongo import MongoSingleton def get_guild_community_ids(platform_id: str) -> str: diff --git a/utils/get_rabbitmq.py b/utils/get_rabbitmq.py deleted file mode 100644 index 3a34727..0000000 --- a/utils/get_rabbitmq.py +++ /dev/null @@ -1,24 +0,0 @@ -from tc_messageBroker import RabbitMQ -from tc_messageBroker.rabbit_mq.queue import Queue -from utils.daolytics_uitls import get_rabbit_mq_credentials - - -def prepare_rabbit_mq(): - """ - Prepare connection to rabbitMQ - - Returns: - ---------- - rabbitmq : tc_messageBroker.RabbitMQ - an instance connected to broker - """ - rabbit_creds = get_rabbit_mq_credentials() - rabbitmq = RabbitMQ( - broker_url=rabbit_creds["broker_url"], - port=rabbit_creds["port"], - username=rabbit_creds["username"], - password=rabbit_creds["password"], - ) - rabbitmq.connect(queue_name=Queue.DISCORD_ANALYZER) - - return rabbitmq diff --git a/utils/get_mongo_client.py b/utils/mongo.py similarity index 94% rename from utils/get_mongo_client.py rename to utils/mongo.py index 5e19664..9d67566 100644 --- a/utils/get_mongo_client.py 
+++ b/utils/mongo.py @@ -2,7 +2,7 @@ from typing import Any from pymongo import MongoClient -from utils.daolytics_uitls import get_mongo_credentials +from utils.credentials import get_mongo_credentials class MongoSingleton: diff --git a/utils/rabbitmq.py b/utils/rabbitmq.py new file mode 100644 index 0000000..1e41a5f --- /dev/null +++ b/utils/rabbitmq.py @@ -0,0 +1,42 @@ +import logging + +from tc_messageBroker import RabbitMQ +from tc_messageBroker.rabbit_mq.queue import Queue +from utils.credentials import get_rabbit_mq_credentials + + +class RabbitMQSingleton: + __instance = None + + def __init__(self): + if RabbitMQSingleton.__instance is not None: + raise Exception("This class is a singleton!") + else: + creds = get_rabbit_mq_credentials() + self.client = self.create_rabbitmq_client(creds) + RabbitMQSingleton.__instance = self + + @staticmethod + def get_instance(): + if RabbitMQSingleton.__instance is None: + try: + RabbitMQSingleton() + logging.info(f"RabbitMQ broker Connected Successfully! Ping returned: {info}") + except Exception as exp: + logging.error(f"RabbitMQ broker not connected! exp: {exp}") + + return RabbitMQSingleton.__instance + + def get_client(self): + return self.client + + def create_rabbitmq_client(self, rabbit_creds: dict[str, str]): + rabbitmq = RabbitMQ( + broker_url=rabbit_creds["broker_url"], + port=rabbit_creds["port"], + username=rabbit_creds["username"], + password=rabbit_creds["password"], + ) + rabbitmq.connect(queue_name=Queue.DISCORD_ANALYZER) + + return rabbitmq \ No newline at end of file diff --git a/utils/redis.py b/utils/redis.py new file mode 100644 index 0000000..c7d9a33 --- /dev/null +++ b/utils/redis.py @@ -0,0 +1,39 @@ +import logging + +import redis +from utils.credentials import get_redis_credentials + + +class RedisSingleton: + __instance = None + + def __init__(self): + if RedisSingleton.__instance is not None: + raise Exception("This class is a singleton!") + else: + creds = get_redis_credentials() + self.client = self.create_redis_client(creds) + RedisSingleton.__instance = self + + @staticmethod + def get_instance(): + if RedisSingleton.__instance is None: + RedisSingleton() + try: + info = RedisSingleton.__instance.client.ping() + logging.info(f"Redis Connected Successfully! Ping returned: {info}") + except Exception as exp: + logging.error(f"Redis not connected! exp: {exp}") + + return RedisSingleton.__instance + + def get_client(self): + return self.client + + def create_redis_client(self, redis_creds: dict[str, str]): + return redis.Redis( + host=redis_creds["host"], + port=int(redis_creds["port"]), + password=redis_creds["pass"], + decode_responses=True, + ) \ No newline at end of file diff --git a/utils/sentryio_service.py b/utils/sentryio_service.py index 422c9ac..647cffc 100644 --- a/utils/sentryio_service.py +++ b/utils/sentryio_service.py @@ -1,10 +1,11 @@ import sentry_sdk +from utils.credentials import get_sentryio_service_creds - -def set_up_sentryio(dsn, environment, sample_rate=1.0): +def set_up_sentryio(sample_rate=1.0): + sentry_creds = get_sentryio_service_creds() sentry_sdk.init( - dsn=dsn, - environment=environment, + dsn=sentry_creds["dsn"], + environment=sentry_creds["env"], # Set traces_sample_rate to 1.0 to capture 100% # of transactions for performance monitoring. # We recommend adjusting this value in production. 
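Note (editorial): the pattern this patch introduces is that every service client comes from a singleton that loads its own credentials through utils/credentials.py, so call sites no longer pass connection details around. A minimal usage sketch of the new interfaces added in this patch (it assumes the environment variables read by utils/credentials.py are set):

    from utils.mongo import MongoSingleton
    from utils.rabbitmq import RabbitMQSingleton
    from utils.redis import RedisSingleton
    from utils.sentryio_service import set_up_sentryio

    set_up_sentryio()  # reads the Sentry DSN and environment itself
    mongo_client = MongoSingleton.get_instance().get_client()  # shared pymongo client
    rabbitmq = RabbitMQSingleton.get_instance().get_client()   # already connected to the DISCORD_ANALYZER queue
    redis_client = RedisSingleton.get_instance().get_client()  # created with decode_responses=True
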
diff --git a/worker.py b/worker.py index 01fae69..f13b94e 100644 --- a/worker.py +++ b/worker.py @@ -2,7 +2,7 @@ import redis from rq import Worker -from utils.daolytics_uitls import get_redis_credentials +from utils.credentials import get_redis_credentials def worker_exception_handler(job, exc_type, exc_value, traceback): From 8429bc2009da5b191b30647595edc4b649343b1e Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 15:25:31 +0330 Subject: [PATCH 27/48] fix: wrong variable call! --- utils/rabbitmq.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/rabbitmq.py b/utils/rabbitmq.py index 1e41a5f..b3d2017 100644 --- a/utils/rabbitmq.py +++ b/utils/rabbitmq.py @@ -21,7 +21,7 @@ def get_instance(): if RabbitMQSingleton.__instance is None: try: RabbitMQSingleton() - logging.info(f"RabbitMQ broker Connected Successfully! Ping returned: {info}") + logging.info("RabbitMQ broker Connected Successfully!") except Exception as exp: logging.error(f"RabbitMQ broker not connected! exp: {exp}") From 5d607aa9a41bf83f4f681f833741936fc22c5e83 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 15:39:13 +0330 Subject: [PATCH 28/48] fix: black linter issue! --- discord_analyzer/analysis/neo4j_metrics.py | 7 +++---- discord_utils.py | 1 + server.py | 17 +++++------------ tests/unit/test_creds_loading.py | 1 + utils/rabbitmq.py | 2 +- utils/redis.py | 2 +- utils/sentryio_service.py | 1 + 7 files changed, 13 insertions(+), 18 deletions(-) diff --git a/discord_analyzer/analysis/neo4j_metrics.py b/discord_analyzer/analysis/neo4j_metrics.py index b23d6b3..8750476 100644 --- a/discord_analyzer/analysis/neo4j_metrics.py +++ b/discord_analyzer/analysis/neo4j_metrics.py @@ -206,10 +206,9 @@ def decenterialization_score(neo4j_analytics, centrality_scores): results_degreeCenterality["score_undirected"] = results_degreeCenterality["score"] # normalizing undirected scores - results_degreeCenterality[ - "normalized_score_undirected" - ] = results_degreeCenterality["score"] / sum( - results_degreeCenterality["score"].values > 0 + results_degreeCenterality["normalized_score_undirected"] = ( + results_degreeCenterality["score"] + / sum(results_degreeCenterality["score"].values > 0) ) # the normalization over positive score_out results_degreeCenterality["normalized_score_out"] = results_degreeCenterality[ diff --git a/discord_utils.py b/discord_utils.py index e78f855..262f5e1 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -41,6 +41,7 @@ def publish_wrapper(**kwargs): return sagaId + def analyzer_run_once(sagaId: str): saga = get_saga_instance(sagaId=sagaId) if saga is None: diff --git a/server.py b/server.py index c1d8154..56d8874 100644 --- a/server.py +++ b/server.py @@ -1,6 +1,7 @@ """ start the project using rabbitMQ """ + import functools import logging from typing import Any @@ -32,12 +33,8 @@ def analyzer(): # 24 hours equal to 86400 seconds rq_queue = RQ_Queue(connection=redis, default_timeout=86400) - analyzer_recompute = functools.partial( - recompute_wrapper, redis_queue=rq_queue - ) - analyzer_run_once = functools.partial( - run_once_wrapper, redis_queue=rq_queue - ) + analyzer_recompute = functools.partial(recompute_wrapper, redis_queue=rq_queue) + analyzer_run_once = functools.partial(run_once_wrapper, redis_queue=rq_queue) rabbit_mq.connect(Queue.DISCORD_ANALYZER, heartbeat=60) @@ -51,9 +48,7 @@ def analyzer(): rabbit_mq.channel.start_consuming() -def recompute_wrapper( - body: dict[str, Any], redis_queue: RQ_Queue -): +def recompute_wrapper(body: 
dict[str, Any], redis_queue: RQ_Queue): sagaId = body["content"]["uuid"] logging.info(f"SAGAID:{sagaId} recompute job Adding to queue") @@ -64,9 +59,7 @@ def recompute_wrapper( ) -def run_once_wrapper( - body: dict[str, Any], redis_queue: RQ_Queue -): +def run_once_wrapper(body: dict[str, Any], redis_queue: RQ_Queue): sagaId = body["content"]["uuid"] logging.info(f"SAGAID:{sagaId} run_once job Adding to queue") redis_queue.enqueue( diff --git a/tests/unit/test_creds_loading.py b/tests/unit/test_creds_loading.py index f78a866..745b1c0 100644 --- a/tests/unit/test_creds_loading.py +++ b/tests/unit/test_creds_loading.py @@ -91,6 +91,7 @@ def test_redis_creds_values(): assert redis_creds["port"] is not None assert redis_creds["host"] is not None + def test_sentryio_creds(): sentry_creds = get_sentryio_service_creds() diff --git a/utils/rabbitmq.py b/utils/rabbitmq.py index b3d2017..452c69a 100644 --- a/utils/rabbitmq.py +++ b/utils/rabbitmq.py @@ -39,4 +39,4 @@ def create_rabbitmq_client(self, rabbit_creds: dict[str, str]): ) rabbitmq.connect(queue_name=Queue.DISCORD_ANALYZER) - return rabbitmq \ No newline at end of file + return rabbitmq diff --git a/utils/redis.py b/utils/redis.py index c7d9a33..6bd2ddb 100644 --- a/utils/redis.py +++ b/utils/redis.py @@ -36,4 +36,4 @@ def create_redis_client(self, redis_creds: dict[str, str]): port=int(redis_creds["port"]), password=redis_creds["pass"], decode_responses=True, - ) \ No newline at end of file + ) diff --git a/utils/sentryio_service.py b/utils/sentryio_service.py index 647cffc..bf4da9e 100644 --- a/utils/sentryio_service.py +++ b/utils/sentryio_service.py @@ -1,6 +1,7 @@ import sentry_sdk from utils.credentials import get_sentryio_service_creds + def set_up_sentryio(sample_rate=1.0): sentry_creds = get_sentryio_service_creds() sentry_sdk.init( From f67715ec0d8239a3d0674070a9b760abad87a69a Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 16:01:53 +0330 Subject: [PATCH 29/48] fix: isort linter issues! 
--- analyzer_init.py | 5 +---- discord_utils.py | 1 - server.py | 3 +-- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/analyzer_init.py b/analyzer_init.py index 081c9b3..52a9555 100644 --- a/analyzer_init.py +++ b/analyzer_init.py @@ -1,10 +1,7 @@ from typing import Any from discord_analyzer import RnDaoAnalyzer -from utils.credentials import ( - get_mongo_credentials, - get_neo4j_credentials, -) +from utils.credentials import get_mongo_credentials, get_neo4j_credentials class AnalyzerInit: diff --git a/discord_utils.py b/discord_utils.py index 262f5e1..685a500 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -1,5 +1,4 @@ import logging -from typing import Any from analyzer_init import AnalyzerInit from automation.automation_workflow import AutomationWorkflow diff --git a/server.py b/server.py index 56d8874..830e01f 100644 --- a/server.py +++ b/server.py @@ -9,11 +9,10 @@ import backoff from discord_utils import analyzer_recompute, analyzer_run_once, publish_on_success from pika.exceptions import AMQPConnectionError, ConnectionClosedByBroker -from redis import Redis from rq import Queue as RQ_Queue -from utils.rabbitmq import RabbitMQSingleton from tc_messageBroker.rabbit_mq.event import Event from tc_messageBroker.rabbit_mq.queue import Queue +from utils.rabbitmq import RabbitMQSingleton from utils.redis import RedisSingleton from utils.sentryio_service import set_up_sentryio From 8b380e2f5be6fd48575be9a57879b0a2e18cbe23 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Tue, 21 May 2024 16:59:25 +0330 Subject: [PATCH 30/48] fix: black linter issue! --- discord_analyzer/analysis/neo4j_metrics.py | 7 ++++--- discord_utils.py | 1 - 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/discord_analyzer/analysis/neo4j_metrics.py b/discord_analyzer/analysis/neo4j_metrics.py index 8750476..b23d6b3 100644 --- a/discord_analyzer/analysis/neo4j_metrics.py +++ b/discord_analyzer/analysis/neo4j_metrics.py @@ -206,9 +206,10 @@ def decenterialization_score(neo4j_analytics, centrality_scores): results_degreeCenterality["score_undirected"] = results_degreeCenterality["score"] # normalizing undirected scores - results_degreeCenterality["normalized_score_undirected"] = ( - results_degreeCenterality["score"] - / sum(results_degreeCenterality["score"].values > 0) + results_degreeCenterality[ + "normalized_score_undirected" + ] = results_degreeCenterality["score"] / sum( + results_degreeCenterality["score"].values > 0 ) # the normalization over positive score_out results_degreeCenterality["normalized_score_out"] = results_degreeCenterality[ diff --git a/discord_utils.py b/discord_utils.py index 685a500..f51dd35 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -10,7 +10,6 @@ def analyzer_recompute(sagaId: str): - saga = get_saga_instance(sagaId=sagaId) if saga is None: logging.warn( From 27e398025029bb42a354b31976997bc6de6db254 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 10:05:51 +0330 Subject: [PATCH 31/48] fix: test case with latest code updates! 
--- discord_utils.py | 7 +++++-- tests/integration/test_publish_on_success.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/discord_utils.py b/discord_utils.py index f51dd35..2ae3cb3 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -73,13 +73,17 @@ def get_saga_instance(sagaId: str): db_name="Saga", collection="sagas", ) + if saga is None: + raise ValueError(f"Saga with sagaId: {sagaId} not found!") + return saga def publish_on_success(connection, result, *args, **kwargs): try: sagaId = args[0][0] - logging.info(f"SAGAID: {sagaId}: ON_SUCCESS callback! ") + msg = f"GUILDID: {guildId}: " + logging.info(f"{msg}SAGAID: {sagaId}: ON_SUCCESS callback! ") saga = get_saga_instance(sagaId=sagaId) rabbitmq = RabbitMQSingleton.get_instance().get_client() @@ -91,7 +95,6 @@ def publish_on_success(connection, result, *args, **kwargs): platform_id = saga.data["platformId"] guildId = get_guild_community_ids(platform_id) - msg = f"GUILDID: {guildId}: " if tx_not_started_count != 0: tx = transactions_ordered[0] diff --git a/tests/integration/test_publish_on_success.py b/tests/integration/test_publish_on_success.py index a8c5ccc..875b9ba 100644 --- a/tests/integration/test_publish_on_success.py +++ b/tests/integration/test_publish_on_success.py @@ -330,7 +330,7 @@ def test_publish_on_success_check_notification_choreographies(): "collection_name": saga_collection, } - sample_args_data = ["sample", saga_id, mongo_creds] + sample_args_data = [saga_id] publish_on_success(None, None, sample_args_data) notification_count = db_access.db_mongo_client[saga_db][ From fb00b1723f250136201a881c87c59baa140246cc Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 10:06:37 +0330 Subject: [PATCH 32/48] fix: variable refrenced before assignment! --- discord_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/discord_utils.py b/discord_utils.py index 2ae3cb3..c00887c 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -82,8 +82,7 @@ def get_saga_instance(sagaId: str): def publish_on_success(connection, result, *args, **kwargs): try: sagaId = args[0][0] - msg = f"GUILDID: {guildId}: " - logging.info(f"{msg}SAGAID: {sagaId}: ON_SUCCESS callback! ") + logging.info(f"SAGAID: {sagaId}: ON_SUCCESS callback! ") saga = get_saga_instance(sagaId=sagaId) rabbitmq = RabbitMQSingleton.get_instance().get_client() @@ -95,6 +94,7 @@ def publish_on_success(connection, result, *args, **kwargs): platform_id = saga.data["platformId"] guildId = get_guild_community_ids(platform_id) + msg = f"GUILDID: {guildId}: " if tx_not_started_count != 0: tx = transactions_ordered[0] From f3051c879f7a7f37f3f29417fd2ee1aeda5d9ec8 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 10:35:19 +0330 Subject: [PATCH 33/48] fix: wrong param reading discord_utils.py! Now as we're just passing the sagaId to publish_on_success, we should use the first param. --- discord_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/discord_utils.py b/discord_utils.py index c00887c..7c655f8 100644 --- a/discord_utils.py +++ b/discord_utils.py @@ -81,8 +81,8 @@ def get_saga_instance(sagaId: str): def publish_on_success(connection, result, *args, **kwargs): try: - sagaId = args[0][0] - logging.info(f"SAGAID: {sagaId}: ON_SUCCESS callback! 
") + sagaId = args[0] + logging.info(f"SAGAID: {sagaId}: ON_SUCCESS callback!") saga = get_saga_instance(sagaId=sagaId) rabbitmq = RabbitMQSingleton.get_instance().get_client() From 98d62ee570095c1b97de383975ed11b80cb50f3a Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 10:44:22 +0330 Subject: [PATCH 34/48] feat: using our redis singletone instance! --- worker.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/worker.py b/worker.py index f13b94e..b2e2eb4 100644 --- a/worker.py +++ b/worker.py @@ -1,8 +1,7 @@ import logging -import redis from rq import Worker -from utils.credentials import get_redis_credentials +from utils.redis import RedisSingleton def worker_exception_handler(job, exc_type, exc_value, traceback): @@ -14,16 +13,10 @@ def worker_exception_handler(job, exc_type, exc_value, traceback): if __name__ == "__main__": - redis_creds = get_redis_credentials() - logging.basicConfig() logging.getLogger().setLevel(logging.INFO) - host = redis_creds["host"] - port = redis_creds["port"] - password = redis_creds["pass"] - - r = redis.Redis(host=host, port=port, password=password) + r = RedisSingleton.get_instance().get_client() worker = Worker( queues=["default"], connection=r, exception_handlers=worker_exception_handler ) From 9c0d695c7136d592d4595315cbd82b4fba4738b4 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 10:52:12 +0330 Subject: [PATCH 35/48] feat: disabling logs for neo4j and mongodb! --- docker-compose.test.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 504399e..21ed52c 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -42,6 +42,8 @@ services: condition: service_healthy mongo: image: "mongo:6.0.8" + logging: + driver: none environment: - MONGO_INITDB_ROOT_USERNAME=root - MONGO_INITDB_ROOT_PASSWORD=pass @@ -53,6 +55,8 @@ services: start_period: 40s neo4j: image: "neo4j:5.9.0" + logging: + driver: none environment: - NEO4J_AUTH=neo4j/password - NEO4J_PLUGINS=["apoc", "graph-data-science"] From 1cc46c7e375c0e77be80c0c2b2fb3050c94338cf Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 11:16:11 +0330 Subject: [PATCH 36/48] fix: update test case to be like normal situation! 
--- tests/integration/test_publish_on_success.py | 24 +++++++++----------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/tests/integration/test_publish_on_success.py b/tests/integration/test_publish_on_success.py index 875b9ba..f1d4e02 100644 --- a/tests/integration/test_publish_on_success.py +++ b/tests/integration/test_publish_on_success.py @@ -26,8 +26,6 @@ def test_publish_on_success_check_notification_choreographies(): saga_id = "000000011111113333377777ie0w" expected_owner_id = "334461287892" db_access = launch_db_access(guild_id) - saga_db = os.getenv("SAGA_DB_NAME") - saga_collection = os.getenv("SAGA_DB_COLLECTION") at_db = os.getenv("AUTOMATION_DB_NAME") at_collection = os.getenv("AUTOMATION_DB_COLLECTION") @@ -36,7 +34,7 @@ def test_publish_on_success_check_notification_choreographies(): ) db_access.db_mongo_client[guild_id].drop_collection("memberactivities") - db_access.db_mongo_client[saga_db].drop_collection(saga_collection) + db_access.db_mongo_client["Saga"].drop_collection("sagas") db_access.db_mongo_client[guild_id].drop_collection("guildmembers") db_access.db_mongo_client[at_db].drop_collection(at_collection) @@ -95,7 +93,7 @@ def test_publish_on_success_check_notification_choreographies(): .strftime("%Y-%m-%dT%H:%M:%S") ) - db_access.db_mongo_client[saga_db][saga_collection].insert_one( + db_access.db_mongo_client["Saga"]["sagas"].insert_one( { "choreography": { "name": "DISCORD_UPDATE_CHANNELS", @@ -326,37 +324,37 @@ def test_publish_on_success_check_notification_choreographies(): connection_uri = f"mongodb://{user}:{password}@{host}:{port}" mongo_creds = { "connection_str": connection_uri, - "db_name": saga_db, - "collection_name": saga_collection, + "db_name": "Saga", + "collection_name": "sagas", } - sample_args_data = [saga_id] + sample_args_data = saga_id publish_on_success(None, None, sample_args_data) - notification_count = db_access.db_mongo_client[saga_db][ - saga_collection + notification_count = db_access.db_mongo_client["Saga"][ + "sagas" ].count_documents({"choreography.name": "DISCORD_NOTIFY_USERS"}) assert notification_count == 4 - user1_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + user1_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( {"data.discordId": "1111"} ) assert user1_doc["data"]["message"] == ("hey User1NickName! please get back to us!") - user2_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + user2_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( {"data.discordId": "1112"} ) assert user2_doc["data"]["message"] == ( "hey User2GlobalName! please get back to us!" ) - user3_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + user3_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( {"data.discordId": "1113"} ) assert user3_doc["data"]["message"] == ("hey user3! please get back to us!") - user_cm_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + user_cm_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( {"data.discordId": "999"} ) expected_msg = "hey body! This users were messaged:\n" From 52b6a3ff630dba210591b8efd3b1f8fc603c489e Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 11:19:19 +0330 Subject: [PATCH 37/48] fix: trying more to detach neo4j & mongodb logs! 
--- docker-compose.test.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 21ed52c..3bd3a37 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -42,8 +42,7 @@ services: condition: service_healthy mongo: image: "mongo:6.0.8" - logging: - driver: none + attach: false environment: - MONGO_INITDB_ROOT_USERNAME=root - MONGO_INITDB_ROOT_PASSWORD=pass @@ -55,8 +54,7 @@ services: start_period: 40s neo4j: image: "neo4j:5.9.0" - logging: - driver: none + attach: false environment: - NEO4J_AUTH=neo4j/password - NEO4J_PLUGINS=["apoc", "graph-data-science"] From 67d885df33677af03635e0909ebbeeaaaeb4929e Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 11:26:42 +0330 Subject: [PATCH 38/48] fix: black linter issue --- tests/integration/test_publish_on_success.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/test_publish_on_success.py b/tests/integration/test_publish_on_success.py index f1d4e02..f788527 100644 --- a/tests/integration/test_publish_on_success.py +++ b/tests/integration/test_publish_on_success.py @@ -331,9 +331,9 @@ def test_publish_on_success_check_notification_choreographies(): sample_args_data = saga_id publish_on_success(None, None, sample_args_data) - notification_count = db_access.db_mongo_client["Saga"][ - "sagas" - ].count_documents({"choreography.name": "DISCORD_NOTIFY_USERS"}) + notification_count = db_access.db_mongo_client["Saga"]["sagas"].count_documents( + {"choreography.name": "DISCORD_NOTIFY_USERS"} + ) assert notification_count == 4 From 27719733504c09dccdb678e67896968f3b18083a Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 18:13:32 +0330 Subject: [PATCH 39/48] feat: updating usage of Neo4jOps! Many more fixes TODO. 
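
Direction of this patch: Neo4jOps is now obtained as a singleton via Neo4jOps.get_instance() instead of being constructed with credentials and passed around, and hand-built f-string Cypher is being migrated to parameterized tc_neo4j_lib Query objects. A minimal sketch of the new calling convention, using only the interfaces that appear in the diff below:

    from tc_neo4j_lib.neo4j_ops import Neo4jOps, Query

    neo4j_ops = Neo4jOps.get_instance()  # connection and credentials handled inside the lib

    query = Query(
        query="MATCH (g:Guild {guildId: $guild_id}) RETURN g.guildId AS guild_id",
        parameters={"guild_id": "1234"},
    )
    neo4j_ops.run_queries_in_batch([query], message="GUILDID: 1234:")

    # parameters can also be passed as a dict to gds.run_cypher
    result = neo4j_ops.gds.run_cypher(
        "MATCH (g:Guild {guildId: $guild_id}) RETURN g.guildId AS guild_id",
        {"guild_id": "1234"},
    )
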
--- .../DB_operations/mongo_neo4j_ops.py | 53 ++++-------- .../DB_operations/network_graph.py | 83 ++++++++++++------- .../neo4j_analysis/analyzer_node_stats.py | 8 +- .../analysis/neo4j_analysis/centrality.py | 24 +++--- .../local_clustering_coefficient.py | 6 +- .../analysis/neo4j_analysis/louvain.py | 29 ++++--- discord_analyzer/analysis/neo4j_metrics.py | 10 +-- .../analysis/neo4j_utils/projection_utils.py | 13 +-- discord_analyzer/analyzer/neo4j_analytics.py | 10 +-- .../analyzer/utils/analyzer_db_manager.py | 9 -- discord_analyzer/rn_analyzer.py | 3 +- .../test_decentralization_score.py | 5 +- .../test_degree_centrality_multiple_guilds.py | 5 +- ...ality_multiple_guilds_preserve_parallel.py | 5 +- ...degree_centrality_parallel_preservation.py | 5 +- tests/integration/test_fragmentation_score.py | 5 +- .../test_fragmentation_score_exclude_past.py | 5 +- .../test_fragmentation_score_from_start.py | 5 +- .../test_fragmentation_score_rescaling.py | 5 +- ..._generated_graph_period_1_year_run_once.py | 4 +- .../test_generated_graph_period_1year.py | 4 +- .../test_generated_graph_period_35_days.py | 5 +- ...generated_graph_period_35_days_run_once.py | 5 +- .../test_generated_graph_period_3_months.py | 5 +- ...enerated_graph_period_3_months_run_once.py | 5 +- .../test_generated_graph_period_6_months.py | 5 +- ...enerated_graph_period_6_months_run_once.py | 5 +- .../test_interacted_in_deletion.py | 4 +- tests/integration/test_lcc_all_connected.py | 4 +- .../test_lcc_partially_connected.py | 4 +- .../test_louvain_algorithm_computation.py | 6 +- ...st_louvain_algorithm_get_computed_dates.py | 8 +- .../integration/test_neo4j_compute_metrics.py | 4 +- .../test_neo4j_compute_metrics_from_start.py | 4 +- ...t_neo4j_projection_utils_computed_dates.py | 13 +-- .../test_network_graph_creation.py | 4 +- tests/integration/test_node_stats.py | 4 +- tests/integration/utils/neo4j_conn.py | 1 - 38 files changed, 193 insertions(+), 189 deletions(-) diff --git a/discord_analyzer/DB_operations/mongo_neo4j_ops.py b/discord_analyzer/DB_operations/mongo_neo4j_ops.py index 79da7cd..6cf781c 100644 --- a/discord_analyzer/DB_operations/mongo_neo4j_ops.py +++ b/discord_analyzer/DB_operations/mongo_neo4j_ops.py @@ -2,7 +2,7 @@ from discord_analyzer.DB_operations.mongodb_interaction import MongoDBOps from discord_analyzer.DB_operations.network_graph import make_neo4j_networkx_query_dict -from tc_neo4j_lib.neo4j_ops import Neo4jOps +from tc_neo4j_lib.neo4j_ops import Neo4jOps, Query class MongoNeo4jDB: @@ -11,34 +11,10 @@ def __init__(self, testing=False): having both databases in one class """ - self.neo4j_ops = None + self.neo4j_ops = Neo4jOps.get_instance() self.mongoOps = None self.testing = testing - def set_neo4j_utils( - self, - db_name: str, - host: str, - port: str, - protocol: str, - user: str, - password: str, - ): - """ - store the neo4j utils instance - """ - self.neo4j_ops = Neo4jOps() - self.neo4j_ops.set_neo4j_db_info( - neo4j_db_name=db_name, - neo4j_protocol=protocol, - neo4j_user=user, - neo4j_password=password, - neo4j_host=host, - neo4j_port=port, - ) - self.neo4j_ops.neo4j_database_connect() - logging.info("Neo4j Connected Successfully!") - def set_mongo_db_ops( self, mongo_user: str, mongo_pass: str, mongo_host: str, mongo_port: str ): @@ -124,8 +100,8 @@ def store_analytics_data( logging.warning("Testing mode enabled! 
Not saving any data") def run_operations_transaction( - self, guildId, queries_list, remove_memberactivities - ): + self, guildId: str, queries_list: list[Query], remove_memberactivities: bool + ) -> None: """ do the deletion and insertion operations inside a transaction @@ -142,7 +118,7 @@ def run_operations_transaction( """ self.guild_msg = f"GUILDID: {guildId}:" - transaction_queries = [] + transaction_queries: list[Query] = [] if remove_memberactivities: logging.info( f"{self.guild_msg} Neo4J GuildId accounts relation will be removed!" @@ -152,14 +128,13 @@ def run_operations_transaction( ) transaction_queries.append(delete_relationship_query) - # logging.info(queries_list) transaction_queries.extend(queries_list) - self.neo4j_ops.store_data_neo4j(transaction_queries, message=self.guild_msg) + self.neo4j_ops.run_queries_in_batch(transaction_queries, message=self.guild_msg) def _create_guild_rel_deletion_query( self, guildId: str, relation_name: str = "INTERACTED_WITH" - ): + ) -> Query: """ create a query to delete the relationships between DiscordAccount users in a specific guild @@ -176,11 +151,19 @@ def _create_guild_rel_deletion_query( final_query : str the final query to remove the relationships """ - - delete_relationship_query = f""" + query_str = f""" MATCH (:DiscordAccount) -[r:{relation_name} {{guildId: '{guildId}'}}]-(:DiscordAccount) DETACH DELETE r""" - return delete_relationship_query + parameters = { + "relation_name": relation_name, + "guild_id": guildId, + } + + query = Query( + query=query_str, + parameters=parameters, + ) + return query diff --git a/discord_analyzer/DB_operations/network_graph.py b/discord_analyzer/DB_operations/network_graph.py index b2dfec6..cf131c1 100644 --- a/discord_analyzer/DB_operations/network_graph.py +++ b/discord_analyzer/DB_operations/network_graph.py @@ -3,13 +3,14 @@ import datetime import networkx +from tc_neo4j_lib import Query def make_neo4j_networkx_query_dict( networkx_graphs: dict[datetime.datetime, networkx.classes.graph.Graph], guildId: str, community_id: str, -): +) -> list[Query]: """ make a list of queries to store networkx graphs into the neo4j @@ -26,7 +27,7 @@ def make_neo4j_networkx_query_dict( Returns: ----------- - queries_list : list + queries_list : list[Query] list of string queries to store data into neo4j """ # extract the graphs and their corresponding interaction dates @@ -53,7 +54,7 @@ def make_graph_list_query( guildId: str, community_id: str, toGuildRelation: str = "IS_MEMBER", -): +) -> list[Query]: """ Make a list of queries for each graph to save their results @@ -75,10 +76,10 @@ def make_graph_list_query( Returns: --------- - final_queries : list of str + final_queries : list[Query] list of strings, each is a query for an interaction graph to be created """ - final_queries = [] + final_queries: list[Query] = [] for graph, date in zip(networkx_graphs, networkx_dates): nodes_dict = graph.nodes.data() @@ -104,7 +105,7 @@ def create_community_node_query( community_id: str, guild_id: str, community_node: str = "Community", -) -> str: +) -> Query: """ create the community node @@ -114,20 +115,32 @@ def create_community_node_query( the community id to create its node guild_id : str the guild node to attach to community + + Returns + --------- + query : Query + the query to run on neo4j to create community node """ date_now_timestamp = get_timestamp() - query = f""" - MERGE (g:Guild {{guildId: '{guild_id}'}}) - ON CREATE SET g.createdAt = {int(date_now_timestamp)} + query_str = f""" + MERGE (g:Guild {{guildId: 
$guild_id}}) + ON CREATE SET g.createdAt = $date_now WITH g - MERGE (c:{community_node} {{id: '{community_id}'}}) - ON CREATE SET c.createdAt = {int(date_now_timestamp)} + MERGE (c:{community_node} {{id: $community_id}}) + ON CREATE SET c.createdAt = $date_now WITH g, c MERGE (g) -[r:IS_WITHIN]-> (c) - ON CREATE SET r.createdAt = {int(date_now_timestamp)} + ON CREATE SET r.createdAt = $date_now """ + parameters = { + "guild_id": guild_id, + "date_now": int(date_now_timestamp), + "community_id": community_id, + } + query = Query(query_str, parameters) + return query @@ -139,7 +152,7 @@ def create_network_query( nodes_type: str = "DiscordAccount", rel_type: str = "INTERACTED_WITH", toGuildRelation: str = "IS_MEMBER", -): +) -> tuple[list[Query], list[Query]]: """ make string query to save the accounts with their account_name and relationships with their relation from **a graph**. @@ -164,9 +177,9 @@ def create_network_query( Returns: ---------- - node_queries : list of str + node_queries : list[Query] the list of MERGE queries for creating all nodes - rel_queries : list of str + rel_queries : list[Query] the list of MERGE queries for creating all relationships """ # getting the timestamp `date` @@ -174,8 +187,8 @@ def create_network_query( date_now_timestamp = get_timestamp() # initializiation of queries - rel_queries = [] - node_queries = [] + rel_queries: list[Query] = [] + node_queries: list[Query] = [] for node in nodes_dict: node_str_query = "" @@ -186,27 +199,33 @@ def create_network_query( node_acc_name = node[1]["acc_name"] # creating the query node_str_query += ( - f"MERGE (a{node_num}:{nodes_type} {{userId: '{node_acc_name}'}}) " + f"MERGE (a{node_num}:{nodes_type} {{userId: $node_acc_name}}) " ) node_str_query += f"""ON CREATE SET a{node_num}.createdAt = - {int(date_now_timestamp)} + $date_now_timestamp """ # relationship query between users and guilds if guildId is not None: # creating the guilds if they weren't created before node_str_query += f"""MERGE (g:Guild {{guildId: '{guildId}'}}) - ON CREATE SET g.createdAt = {int(date_now_timestamp)} + ON CREATE SET g.createdAt = $date_now_timestamp """ node_str_query += f""" MERGE (a{node_num}) -[rel_guild{node_num}:{toGuildRelation}]-> (g) ON CREATE SET - rel_guild{node_num}.createdAt = {int(date_now_timestamp)} + rel_guild{node_num}.createdAt = $date_now_timestamp """ - node_queries.append(node_str_query + ";") + parameters = { + "node_acc_name": node_acc_name, + date_now_timestamp: int(date_now_timestamp), + } + query_str = node_str_query + ";" + + node_queries.append(Query(query_str, parameters)) for idx, edge in enumerate(edge_dict): rel_str_query = "" @@ -225,19 +244,27 @@ def create_network_query( interaction_count = edge[2]["weight"] rel_str_query += f"""MATCH (a{starting_acc_num}:{nodes_type} - {{userId: \'{starting_node_acc_name}\'}}) + {{userId: $starting_node_acc_name}}) MATCH (a{ending_acc_num}:{nodes_type} - {{userId: \'{ending_node_acc_name}\'}}) + {{userId: $ending_node_acc_name}}) MERGE (a{starting_acc_num}) -[rel{idx}:{rel_type} {{ - date: {int(graph_date_timestamp)}, - weight: {int(interaction_count)}, - guildId: '{guildId}' + date: $date, + weight: $weight, + guildId: $guild_id }} ]-> (a{ending_acc_num}) """ - rel_queries.append(rel_str_query + ";") + query_str = rel_str_query + ";" + parameters = { + "starting_node_acc_name": starting_node_acc_name, + "ending_node_acc_name": ending_node_acc_name, + "date": int(graph_date_timestamp), + "weight": int(interaction_count), + "guild_id": guildId, + } + 
rel_queries.append(Query(query_str, parameters)) return node_queries, rel_queries diff --git a/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py b/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py index 5741669..14c76b2 100644 --- a/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py +++ b/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py @@ -123,13 +123,15 @@ def get_computed_dates( """ get the computed dates of our guild """ - query = f""" + query = """ MATCH (:DiscordAccount) - -[r:INTERACTED_IN]->(g:Guild {{guildId: '{guildId}'}}) + -[r:INTERACTED_IN]->(g:Guild {guildId: $guild_id}) WHERE r.status IS NOT NULL RETURN r.date as computed_dates """ - computed_dates = projection_utils.get_computed_dates(query=query) + computed_dates = projection_utils.get_computed_dates( + query=query, guild_id=guildId + ) return computed_dates diff --git a/discord_analyzer/analysis/neo4j_analysis/centrality.py b/discord_analyzer/analysis/neo4j_analysis/centrality.py index bf7bf17..5bf03b1 100644 --- a/discord_analyzer/analysis/neo4j_analysis/centrality.py +++ b/discord_analyzer/analysis/neo4j_analysis/centrality.py @@ -4,7 +4,7 @@ import pandas as pd from discord_analyzer.analysis.neo4j_metrics import Neo4JMetrics from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils -from tc_neo4j_lib.neo4j_ops import Neo4jOps +from tc_neo4j_lib.neo4j_ops import Neo4jOps, Query class Centerality: @@ -143,13 +143,13 @@ def _get_dates_to_compute( guildId : str the guildId to get computations date """ - query = f""" - MATCH (g:Guild {{guildId: '{guildId}'}}) + query = """ + MATCH (g:Guild {guildId: $guild_id}) -[r:HAVE_METRICS] -> (g) WHERE r.decentralizationScore IS NOT NULL RETURN r.date as computed_dates """ - computed_dates = projection_utils.get_computed_dates(query) + computed_dates = projection_utils.get_computed_dates(query, guild_id=guildId) dates_to_compute = user_interaction_dates - computed_dates @@ -348,18 +348,18 @@ def save_decentralization_score( the network decentrality scores over time """ # preparing the queries - queries = [] + queries: list[Query] = [] for date in decentrality_score.keys(): - query = f""" - MATCH (g: Guild {{guildId: '{guildId}'}}) - MERGE (g) -[r:HAVE_METRICS {{ - date: {date} - }}]-> (g) - SET r.decentralizationScore = {decentrality_score[date]} + query_str = """ + MATCH (g: Guild {guildId: $guild_id}) + MERGE (g) -[r:HAVE_METRICS {date: $date}]-> (g) + SET r.decentralizationScore = $score """ + parameters = {"guild_id": guildId, "score": decentrality_score[date]} + query = Query(query=query_str, parameters=parameters) queries.append(query) - self.neo4j_ops.store_data_neo4j( + self.neo4j_ops.run_queries_in_batch( queries, message=f"GUILDID: {guildId}: Saving Network Decentrality:", ) diff --git a/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py b/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py index 869cd8f..49fabfd 100644 --- a/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py +++ b/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py @@ -118,13 +118,13 @@ def get_computed_dates( the computation dates """ # getting the dates computed before - query = f""" + query = """ MATCH (:DiscordAccount) - -[r:INTERACTED_IN]->(g:Guild {{guildId: '{guildId}'}}) + -[r:INTERACTED_IN]->(g:Guild {guildId: $guild_id}) WHERE r.localClusteringCoefficient IS NOT NULL RETURN r.date as computed_dates """ - computed_dates = 
projection_utils.get_computed_dates(query) + computed_dates = projection_utils.get_computed_dates(query, guild_id=guildId) return computed_dates diff --git a/discord_analyzer/analysis/neo4j_analysis/louvain.py b/discord_analyzer/analysis/neo4j_analysis/louvain.py index 82eb060..94c8073 100644 --- a/discord_analyzer/analysis/neo4j_analysis/louvain.py +++ b/discord_analyzer/analysis/neo4j_analysis/louvain.py @@ -6,11 +6,11 @@ class Louvain: - def __init__(self, neo4j_ops: Neo4jOps) -> None: + def __init__(self) -> None: """ louvain algorithm wrapper to compute """ - self.neo4j_ops = neo4j_ops + self.neo4j_ops = Neo4jOps.get_instance() def compute(self, guild_id: str, from_start: bool = False) -> None: """ @@ -25,7 +25,7 @@ def compute(self, guild_id: str, from_start: bool = False) -> None: if True, then would compute from start default is False """ - projection_utils = ProjectionUtils(gds=self.neo4j_ops.gds, guildId=guild_id) + projection_utils = ProjectionUtils(guildId=guild_id) computable_dates = projection_utils.get_dates(guildId=guild_id) @@ -105,13 +105,13 @@ def get_computed_dates( the computation dates """ # getting the dates computed before - query = f""" - MATCH (g:Guild {{guildId: '{guildId}'}}) + query = """ + MATCH (g:Guild {guildId: $guild_id}) -[r:HAVE_METRICS]->(g) WHERE r.louvainModularityScore IS NOT NULL RETURN r.date as computed_dates """ - computed_dates = projection_utils.get_computed_dates(query) + computed_dates = projection_utils.get_computed_dates(query, guild_id=guildId) return computed_dates @@ -134,16 +134,19 @@ def compute_graph_louvain( msg = f"GUILDID: {guild_id}" try: _ = self.neo4j_ops.gds.run_cypher( - f""" - CALL gds.louvain.stats("{graph_name}") + """ + CALL gds.louvain.stats($graph_name) YIELD modularity WITH modularity - MATCH (g:Guild {{guildId: '{guild_id}'}}) - MERGE (g) -[r:HAVE_METRICS {{ - date: {date} - }}]-> (g) + MATCH (g:Guild {guildId: $guild_id}) + MERGE (g) -[r:HAVE_METRICS { + date: $date + }]-> (g) SET r.louvainModularityScore = modularity - """ + """, + graph_name=graph_name, + guild_id=guild_id, + date=date, ) except Exception as exp: logging.error( diff --git a/discord_analyzer/analysis/neo4j_metrics.py b/discord_analyzer/analysis/neo4j_metrics.py index b23d6b3..bbc0d67 100644 --- a/discord_analyzer/analysis/neo4j_metrics.py +++ b/discord_analyzer/analysis/neo4j_metrics.py @@ -1,8 +1,7 @@ import os from discord_analyzer.analysis.neo4j_utils.compute_metrics import Neo4JMetrics -from dotenv import load_dotenv -from tc_neo4j_lib.neo4j_ops import Neo4jOps +from dotenv import load_dotenvfrom tc_neo4j_lib.neo4j_ops import Neo4jOps def degree_centrality( @@ -206,10 +205,9 @@ def decenterialization_score(neo4j_analytics, centrality_scores): results_degreeCenterality["score_undirected"] = results_degreeCenterality["score"] # normalizing undirected scores - results_degreeCenterality[ - "normalized_score_undirected" - ] = results_degreeCenterality["score"] / sum( - results_degreeCenterality["score"].values > 0 + results_degreeCenterality["normalized_score_undirected"] = ( + results_degreeCenterality["score"] + / sum(results_degreeCenterality["score"].values > 0) ) # the normalization over positive score_out results_degreeCenterality["normalized_score_out"] = results_degreeCenterality[ diff --git a/discord_analyzer/analysis/neo4j_utils/projection_utils.py b/discord_analyzer/analysis/neo4j_utils/projection_utils.py index 070714d..3201071 100644 --- a/discord_analyzer/analysis/neo4j_utils/projection_utils.py +++ 
b/discord_analyzer/analysis/neo4j_utils/projection_utils.py @@ -1,11 +1,10 @@ import logging - -from graphdatascience import GraphDataScience +from tc_neo4j_lib.neo4j_ops import Neo4jOps class ProjectionUtils: - def __init__(self, gds: GraphDataScience, guildId: str) -> None: - self.gds = gds + def __init__(self, guildId: str) -> None: + self.gds = Neo4jOps.get_instance().gds self.guildId = guildId def project_temp_graph( @@ -123,7 +122,7 @@ def get_dates(self, guildId: str) -> set[float]: return computable_dates_set - def get_computed_dates(self, query: str) -> set[float]: + def get_computed_dates(self, query: str, **params) -> set[float]: """ get the computed metric dates for that specific query @@ -133,8 +132,10 @@ def get_computed_dates(self, query: str) -> set[float]: the query to get the computed dates of a metric must have one return results with label of computed_dates first one is date + params: Dict[str, Any] + parameters to the query """ - dates = self.gds.run_cypher(query) + dates = self.gds.run_cypher(query, params) computed_dates = set(dates["computed_dates"].values) return computed_dates diff --git a/discord_analyzer/analyzer/neo4j_analytics.py b/discord_analyzer/analyzer/neo4j_analytics.py index 3672dae..3063662 100644 --- a/discord_analyzer/analyzer/neo4j_analytics.py +++ b/discord_analyzer/analyzer/neo4j_analytics.py @@ -11,12 +11,12 @@ class Neo4JAnalytics: - def __init__(self, neo4j_ops: Neo4jOps) -> None: + def __init__(self) -> None: """ neo4j metrics to be compute input variables are all the neo4j credentials """ - self.neo4j_ops = neo4j_ops + self.neo4j_ops = Neo4jOps.get_instance() def compute_metrics(self, guildId: str, from_start: bool) -> None: """ @@ -116,7 +116,7 @@ def compute_network_decentrality(self, guildId: str, from_start: bool): """ msg = f"GUILDID: {guildId}:" try: - centrality = Centerality(self.neo4j_ops) + centrality = Centerality() # degree decentrality _ = centrality.compute_network_decentrality( guildId=guildId, from_start=from_start @@ -137,7 +137,7 @@ def compute_node_stats(self, guildId: str, from_start: bool): msg = f"GUILDID: {guildId}:" try: logging.info(f"{msg}: computing node stats") - node_stats = NodeStats(self.neo4j_ops, threshold=2) + node_stats = NodeStats(threshold=2) node_stats.compute_stats(guildId, from_start) except Exception as exp: logging.error(f"{msg} Exception occured in node stats computation, {exp}") @@ -170,6 +170,6 @@ def compute_louvain_algorithm(self, guild_id: str, from_start: bool) -> None: from_start : bool compute from the start of the data available or continue the previous """ - louvain = Louvain(self.neo4j_ops) + louvain = Louvain() louvain.compute(guild_id, from_start) diff --git a/discord_analyzer/analyzer/utils/analyzer_db_manager.py b/discord_analyzer/analyzer/utils/analyzer_db_manager.py index 7f7ad22..53abee6 100644 --- a/discord_analyzer/analyzer/utils/analyzer_db_manager.py +++ b/discord_analyzer/analyzer/utils/analyzer_db_manager.py @@ -63,12 +63,3 @@ def database_connect(self): mongo_host=self.mongo_host, mongo_port=self.mongo_port, ) - - self.DB_connections.set_neo4j_utils( - db_name=self.neo4j_db_name, - host=self.neo4j_host, - port=self.neo4j_port, - protocol=self.neo4j_protocol, - user=self.neo4j_user, - password=self.neo4j_password, - ) diff --git a/discord_analyzer/rn_analyzer.py b/discord_analyzer/rn_analyzer.py index f4a519f..9a76629 100644 --- a/discord_analyzer/rn_analyzer.py +++ b/discord_analyzer/rn_analyzer.py @@ -30,7 +30,8 @@ def setup_neo4j_metrics(self) -> None: """ setup the neo4j 
analytics wrapper """ - self.neo4j_analytics = Neo4JAnalytics(neo4j_ops=self.DB_connections.neo4j_ops) + + self.neo4j_analytics = Neo4JAnalytics() def run_once(self): """Run analysis once (Wrapper)""" diff --git a/tests/integration/test_decentralization_score.py b/tests/integration/test_decentralization_score.py index 80a1c93..25b4732 100644 --- a/tests/integration/test_decentralization_score.py +++ b/tests/integration/test_decentralization_score.py @@ -1,7 +1,6 @@ # the nodes of the graph are partially connected from discord_analyzer.analysis.neo4j_analysis.centrality import Centerality - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_decentralization_score(): @@ -13,7 +12,7 @@ def test_decentralization_score(): https://miro.com/app/board/uXjVM7GdYqo=/?moveToWidget=3458764558210553321&cot=14 """ guildId = "1234" - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() centrality = Centerality(neo4j_ops) # deleting all data diff --git a/tests/integration/test_degree_centrality_multiple_guilds.py b/tests/integration/test_degree_centrality_multiple_guilds.py index 432cee9..141b93b 100644 --- a/tests/integration/test_degree_centrality_multiple_guilds.py +++ b/tests/integration/test_degree_centrality_multiple_guilds.py @@ -1,8 +1,7 @@ # we have nodes of a community is connected to another one # meaning we have nodes available in more than one community from discord_analyzer.analysis.neo4j_analysis.centrality import Centerality - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_multiple_guilds(): @@ -16,7 +15,7 @@ def test_multiple_guilds(): https://miro.com/app/board/uXjVM7GdYqo=/?share_link_id=105382864070 """ guildId = "1234" - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py b/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py index 2de197d..0434914 100644 --- a/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py +++ b/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py @@ -1,8 +1,7 @@ # we have nodes of a community is connected to another one # meaning we have nodes available in more than one community from discord_analyzer.analysis.neo4j_analysis.centrality import Centerality - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_multiple_guilds_preserve_parallel(): @@ -16,7 +15,7 @@ def test_multiple_guilds_preserve_parallel(): https://miro.com/app/board/uXjVM7GdYqo=/?share_link_id=105382864070 """ guildId = "1234" - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() centrality = Centerality(neo4j_ops) # deleting all data diff --git a/tests/integration/test_degree_centrality_parallel_preservation.py b/tests/integration/test_degree_centrality_parallel_preservation.py index af93399..1c3e0e9 100644 --- a/tests/integration/test_degree_centrality_parallel_preservation.py +++ b/tests/integration/test_degree_centrality_parallel_preservation.py @@ -1,7 +1,6 @@ # the nodes of the graph are partially connected from discord_analyzer.analysis.neo4j_analysis.centrality import Centerality - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_partially_connected_coeffs(): @@ -13,7 +12,7 @@ def test_partially_connected_coeffs(): 
https://miro.com/app/board/uXjVM7GdYqo=/?share_link_id=105382864070 """ guildId = "1234" - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_fragmentation_score.py b/tests/integration/test_fragmentation_score.py index a81b135..1708e68 100644 --- a/tests/integration/test_fragmentation_score.py +++ b/tests/integration/test_fragmentation_score.py @@ -1,15 +1,14 @@ from datetime import datetime, timedelta from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_avg_clustering_coeff(): """ test scaling of the avgClusteringCoefficient (a.k.a fragmentation score) """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_analytics = Neo4JAnalytics(neo4j_ops) # deleting all data diff --git a/tests/integration/test_fragmentation_score_exclude_past.py b/tests/integration/test_fragmentation_score_exclude_past.py index 312328e..40ba8f9 100644 --- a/tests/integration/test_fragmentation_score_exclude_past.py +++ b/tests/integration/test_fragmentation_score_exclude_past.py @@ -1,15 +1,14 @@ from datetime import datetime, timedelta from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_avg_clustering_exclude_past(): """ test scaling of the avgClusteringCoefficient (a.k.a fragmentation score) """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_analytics = Neo4JAnalytics(neo4j_ops) # deleting all data diff --git a/tests/integration/test_fragmentation_score_from_start.py b/tests/integration/test_fragmentation_score_from_start.py index 700f385..404bce0 100644 --- a/tests/integration/test_fragmentation_score_from_start.py +++ b/tests/integration/test_fragmentation_score_from_start.py @@ -1,15 +1,14 @@ from datetime import datetime, timedelta from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_avg_clustering_coeff_from_start(): """ test scaling of the avgClusteringCoefficient (a.k.a fragmentation score) """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_analytics = Neo4JAnalytics(neo4j_ops) # deleting all data diff --git a/tests/integration/test_fragmentation_score_rescaling.py b/tests/integration/test_fragmentation_score_rescaling.py index 40c8975..607df8f 100644 --- a/tests/integration/test_fragmentation_score_rescaling.py +++ b/tests/integration/test_fragmentation_score_rescaling.py @@ -1,15 +1,14 @@ from datetime import datetime, timedelta from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_avg_clustering_coeff_scaling(): """ test scaling of the avgClusteringCoefficient (a.k.a fragmentation score) """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_analytics = Neo4JAnalytics(neo4j_ops) # deleting all data diff --git a/tests/integration/test_generated_graph_period_1_year_run_once.py b/tests/integration/test_generated_graph_period_1_year_run_once.py index 875616c..8ba7556 100644 --- a/tests/integration/test_generated_graph_period_1_year_run_once.py +++ b/tests/integration/test_generated_graph_period_1_year_run_once.py @@ -5,7 +5,7 @@ from 
.utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +20,7 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_generated_graph_period_1year.py b/tests/integration/test_generated_graph_period_1year.py index db62ac2..3404075 100644 --- a/tests/integration/test_generated_graph_period_1year.py +++ b/tests/integration/test_generated_graph_period_1year.py @@ -5,7 +5,7 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +20,7 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_generated_graph_period_35_days.py b/tests/integration/test_generated_graph_period_35_days.py index f05709c..0d20b41 100644 --- a/tests/integration/test_generated_graph_period_35_days.py +++ b/tests/integration/test_generated_graph_period_35_days.py @@ -5,7 +5,8 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup + +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +21,7 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): platform_id = "515151515151515151515151" community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_generated_graph_period_35_days_run_once.py b/tests/integration/test_generated_graph_period_35_days_run_once.py index 41b8f62..8e06333 100644 --- a/tests/integration/test_generated_graph_period_35_days_run_once.py +++ b/tests/integration/test_generated_graph_period_35_days_run_once.py @@ -5,7 +5,8 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup + +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +21,7 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): platform_id = "515151515151515151515151" community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git 
a/tests/integration/test_generated_graph_period_3_months.py b/tests/integration/test_generated_graph_period_3_months.py index bd170e7..ddae3dc 100644 --- a/tests/integration/test_generated_graph_period_3_months.py +++ b/tests/integration/test_generated_graph_period_3_months.py @@ -5,7 +5,8 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup + +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +21,7 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): community_id = "aabbccddeeff001122334455" platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_generated_graph_period_3_months_run_once.py b/tests/integration/test_generated_graph_period_3_months_run_once.py index 5e4b134..9d71b55 100644 --- a/tests/integration/test_generated_graph_period_3_months_run_once.py +++ b/tests/integration/test_generated_graph_period_3_months_run_once.py @@ -5,7 +5,8 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup + +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +21,7 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): platform_id = "515151515151515151515151" community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_generated_graph_period_6_months.py b/tests/integration/test_generated_graph_period_6_months.py index 01ee33c..834f615 100644 --- a/tests/integration/test_generated_graph_period_6_months.py +++ b/tests/integration/test_generated_graph_period_6_months.py @@ -5,7 +5,8 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup + +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild @@ -20,7 +21,7 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): platform_id = "515151515151515151515151" community_id = "aabbccddeeff001122334455" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_generated_graph_period_6_months_run_once.py b/tests/integration/test_generated_graph_period_6_months_run_once.py index e76f635..a655fac 100644 --- a/tests/integration/test_generated_graph_period_6_months_run_once.py +++ b/tests/integration/test_generated_graph_period_6_months_run_once.py @@ -5,7 +5,8 @@ from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from .utils.neo4j_conn import neo4j_setup + +from tc_neo4j_lib.neo4j_ops import Neo4jOps 
from .utils.remove_and_setup_guild import setup_db_guild @@ -21,7 +22,7 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): platform_id = "515151515151515151515151" db_access = launch_db_access(guildId) - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_ops.gds.run_cypher( """ diff --git a/tests/integration/test_interacted_in_deletion.py b/tests/integration/test_interacted_in_deletion.py index 01a854d..df09d1f 100644 --- a/tests/integration/test_interacted_in_deletion.py +++ b/tests/integration/test_interacted_in_deletion.py @@ -1,6 +1,6 @@ from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_interacted_in_deletion(): @@ -8,7 +8,7 @@ def test_interacted_in_deletion(): test whether we're deleting the INTERACTED_IN relations or not """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() neo4j_analytics = Neo4JAnalytics(neo4j_ops) neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_lcc_all_connected.py b/tests/integration/test_lcc_all_connected.py index f580c0a..0955f91 100644 --- a/tests/integration/test_lcc_all_connected.py +++ b/tests/integration/test_lcc_all_connected.py @@ -3,7 +3,7 @@ LocalClusteringCoeff, ) -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_all_connected_coeffs(): @@ -16,7 +16,7 @@ def test_all_connected_coeffs(): To see more info for this test: https://miro.com/app/board/uXjVM7GdYqo=/?share_link_id=105382864070 """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_lcc_partially_connected.py b/tests/integration/test_lcc_partially_connected.py index 6ed6f78..5cd81e7 100644 --- a/tests/integration/test_lcc_partially_connected.py +++ b/tests/integration/test_lcc_partially_connected.py @@ -3,7 +3,7 @@ LocalClusteringCoeff, ) -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_partially_connected_coeffs(): @@ -14,7 +14,7 @@ def test_partially_connected_coeffs(): To see more info for this test: https://miro.com/app/board/uXjVM7GdYqo=/?share_link_id=105382864070 """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_louvain_algorithm_computation.py b/tests/integration/test_louvain_algorithm_computation.py index b08c224..94141d3 100644 --- a/tests/integration/test_louvain_algorithm_computation.py +++ b/tests/integration/test_louvain_algorithm_computation.py @@ -1,13 +1,13 @@ from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_louvain_algorithm_available_data(): """ test the louvain algorithm with some nodes connected """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") @@ -55,7 +55,7 @@ def test_louvain_algorithm_more_available_data(): """ test the louvain algorithm with some more data available """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_louvain_algorithm_get_computed_dates.py 
b/tests/integration/test_louvain_algorithm_get_computed_dates.py index c2a1a13..c1c6cbe 100644 --- a/tests/integration/test_louvain_algorithm_get_computed_dates.py +++ b/tests/integration/test_louvain_algorithm_get_computed_dates.py @@ -1,14 +1,14 @@ from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_louvain_get_computed_dates_empty_data(): """ test with empty data for getting the computed dates """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") @@ -50,7 +50,7 @@ def test_louvain_get_computed_dates_empty_data_with_have_metrics_relation(): """ test with empty data for getting the computed dates """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") @@ -93,7 +93,7 @@ def test_louvain_get_computed_dates_one_data(): """ test with empty data for getting the computed dates """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_neo4j_compute_metrics.py b/tests/integration/test_neo4j_compute_metrics.py index e2d0295..62e4c8b 100644 --- a/tests/integration/test_neo4j_compute_metrics.py +++ b/tests/integration/test_neo4j_compute_metrics.py @@ -1,7 +1,7 @@ import numpy as np from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_guild_results_available(): @@ -12,7 +12,7 @@ def test_guild_results_available(): and decentralization scores are available in guild node and localClustetingCoefficient is available in DiscordAccount nodes """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_neo4j_compute_metrics_from_start.py b/tests/integration/test_neo4j_compute_metrics_from_start.py index 33d0d7d..1233232 100644 --- a/tests/integration/test_neo4j_compute_metrics_from_start.py +++ b/tests/integration/test_neo4j_compute_metrics_from_start.py @@ -1,7 +1,7 @@ import numpy as np from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_neo4j_compute_metrics_from_start(): @@ -12,7 +12,7 @@ def test_neo4j_compute_metrics_from_start(): and decentralization scores are available in guild node and localClustetingCoefficient is available in DiscordAccount nodes """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_neo4j_projection_utils_computed_dates.py b/tests/integration/test_neo4j_projection_utils_computed_dates.py index ecbf3e3..9804859 100644 --- a/tests/integration/test_neo4j_projection_utils_computed_dates.py +++ b/tests/integration/test_neo4j_projection_utils_computed_dates.py @@ -1,13 +1,13 @@ from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils -from .utils.neo4j_conn import neo4j_setup +# from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_neo4j_projection_utils_get_computed_dates(): """ testing the 
projection utils get_computed_dates """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") @@ -65,13 +65,14 @@ def test_neo4j_projection_utils_get_computed_dates(): SET r12.guildId = '{guildId}' """ ) - projection_utils = ProjectionUtils(neo4j_ops.gds, guildId=guildId) + projection_utils = ProjectionUtils(guildId=guildId) computed_dates = projection_utils.get_computed_dates( - f""" - MATCH (:DiscordAccount)-[r:INTERACTED_IN]->(g:Guild {{guildId: '{guildId}'}}) + """ + MATCH (:DiscordAccount)-[r:INTERACTED_IN]->(g:Guild {guildId: $guild_id}) WHERE r.localClusteringCoefficient is NOT NULL RETURN r.date as computed_dates - """ + """, + guild_id=guildId, ) print(computed_dates) diff --git a/tests/integration/test_network_graph_creation.py b/tests/integration/test_network_graph_creation.py index e2cc15e..6464282 100644 --- a/tests/integration/test_network_graph_creation.py +++ b/tests/integration/test_network_graph_creation.py @@ -6,12 +6,12 @@ from discord_analyzer.analysis.utils.activity import Activity from .utils.mock_graph import generate_mock_graph, store_mock_data_in_neo4j -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_network_graph_create(): community_id = "4321" - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_node_stats.py b/tests/integration/test_node_stats.py index 430cf18..5bda02b 100644 --- a/tests/integration/test_node_stats.py +++ b/tests/integration/test_node_stats.py @@ -1,7 +1,7 @@ # test out local clustering coefficient with all nodes connected from discord_analyzer.analysis.neo4j_analysis.analyzer_node_stats import NodeStats -from .utils.neo4j_conn import neo4j_setup +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_node_stats(): @@ -12,7 +12,7 @@ def test_node_stats(): To see the graph for this test: https://miro.com/app/board/uXjVM7GdYqo=/?share_link_id=105382864070 """ - neo4j_ops = neo4j_setup() + neo4j_ops = Neo4jOps.get_instance() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/utils/neo4j_conn.py b/tests/integration/utils/neo4j_conn.py index 8a534be..7a7c163 100644 --- a/tests/integration/utils/neo4j_conn.py +++ b/tests/integration/utils/neo4j_conn.py @@ -1,7 +1,6 @@ import os from dotenv import load_dotenv -from tc_neo4j_lib.neo4j_ops import Neo4jOps def neo4j_setup() -> Neo4jOps: From 0f677d3da37b13d93be0452a0cc52ac38b549272 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Wed, 22 May 2024 18:15:31 +0330 Subject: [PATCH 40/48] fix: import syntax error! 
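neo4j_metrics.py ended up with two import statements concatenated on a single line ("from dotenv import load_dotenvfrom tc_neo4j_lib.neo4j_ops import Neo4jOps"), which fails to parse; this commit splits them back into two separate statements.

For context, the surrounding commits in this series drop the explicitly constructed GraphDataScience / Neo4jOps objects in favour of the shared instance exposed by tc_neo4j_lib. A minimal sketch of the access pattern the touched modules now rely on (the attribute names are taken from the calls in these diffs; construction and credential loading are assumed to happen inside tc_neo4j_lib and are not shown here):

    from tc_neo4j_lib.neo4j_ops import Neo4jOps

    neo4j_ops = Neo4jOps.get_instance()   # shared instance; callers no longer pass credentials
    gds = neo4j_ops.gds                   # GraphDataScience handle, queried via run_cypher(query, params)
    driver = neo4j_ops.neo4j_driver       # plain Neo4j driver, used e.g. by NodeStats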
--- discord_analyzer/analysis/neo4j_metrics.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/discord_analyzer/analysis/neo4j_metrics.py b/discord_analyzer/analysis/neo4j_metrics.py index bbc0d67..8750476 100644 --- a/discord_analyzer/analysis/neo4j_metrics.py +++ b/discord_analyzer/analysis/neo4j_metrics.py @@ -1,7 +1,8 @@ import os from discord_analyzer.analysis.neo4j_utils.compute_metrics import Neo4JMetrics -from dotenv import load_dotenvfrom tc_neo4j_lib.neo4j_ops import Neo4jOps +from dotenv import load_dotenv +from tc_neo4j_lib.neo4j_ops import Neo4jOps def degree_centrality( From 7e629ecd9e9ec2d7eda80f4da8934214de8cd11c Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:08:52 +0330 Subject: [PATCH 41/48] fix: removing the manually creation of neo4j instance! --- analyzer_init.py | 5 +- .../DB_operations/mongo_neo4j_ops.py | 1 + .../DB_operations/network_graph.py | 2 +- .../neo4j_analysis/analyzer_node_stats.py | 40 +-- .../analysis/neo4j_analysis/centrality.py | 23 +- .../local_clustering_coefficient.py | 40 +-- .../analysis/neo4j_analysis/louvain.py | 15 +- discord_analyzer/analysis/neo4j_metrics.py | 242 ------------------ .../{compute_metrics.py => neo4j_metrics.py} | 6 +- discord_analyzer/analyzer/neo4j_analytics.py | 2 +- .../analyzer/utils/analyzer_db_manager.py | 23 -- discord_analyzer/rn_analyzer.py | 8 +- .../test_assess_engagement_mention.py | 4 +- .../test_assess_engagement_reactions.py | 4 +- .../test_assess_engagement_replies.py | 4 +- .../test_decentralization_score.py | 2 +- .../test_degree_centrality_multiple_guilds.py | 2 +- ...ality_multiple_guilds_preserve_parallel.py | 2 +- ...degree_centrality_parallel_preservation.py | 2 +- tests/integration/test_fragmentation_score.py | 2 +- .../test_fragmentation_score_exclude_past.py | 2 +- .../test_fragmentation_score_from_start.py | 2 +- .../test_fragmentation_score_rescaling.py | 2 +- .../test_interacted_in_deletion.py | 2 +- tests/integration/test_lcc_all_connected.py | 2 +- .../test_lcc_partially_connected.py | 2 +- .../test_louvain_algorithm_computation.py | 4 +- ...st_louvain_algorithm_get_computed_dates.py | 18 +- .../integration/test_neo4j_compute_metrics.py | 2 +- .../test_neo4j_compute_metrics_from_start.py | 2 +- ...t_neo4j_projection_utils_computed_dates.py | 2 +- tests/integration/test_node_stats.py | 2 +- tests/integration/utils/analyzer_setup.py | 10 - tests/integration/utils/mock_graph.py | 9 - tests/unit/test_creds_loading.py | 25 -- utils/credentials.py | 28 -- 36 files changed, 101 insertions(+), 442 deletions(-) delete mode 100644 discord_analyzer/analysis/neo4j_metrics.py rename discord_analyzer/analysis/neo4j_utils/{compute_metrics.py => neo4j_metrics.py} (98%) diff --git a/analyzer_init.py b/analyzer_init.py index 52a9555..9cfdcaf 100644 --- a/analyzer_init.py +++ b/analyzer_init.py @@ -1,7 +1,7 @@ from typing import Any from discord_analyzer import RnDaoAnalyzer -from utils.credentials import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials class AnalyzerInit: @@ -23,7 +23,6 @@ def get_analyzer(self) -> RnDaoAnalyzer: # credentials mongo_creds = get_mongo_credentials() - neo4j_creds = get_neo4j_credentials() analyzer.set_mongo_database_info( mongo_db_host=mongo_creds["host"], @@ -31,9 +30,7 @@ def get_analyzer(self) -> RnDaoAnalyzer: mongo_db_port=mongo_creds["port"], mongo_db_user=mongo_creds["user"], ) - analyzer.set_neo4j_database_info(neo4j_creds=neo4j_creds) analyzer.database_connect() - 
analyzer.setup_neo4j_metrics() return analyzer diff --git a/discord_analyzer/DB_operations/mongo_neo4j_ops.py b/discord_analyzer/DB_operations/mongo_neo4j_ops.py index 6cf781c..35f53ab 100644 --- a/discord_analyzer/DB_operations/mongo_neo4j_ops.py +++ b/discord_analyzer/DB_operations/mongo_neo4j_ops.py @@ -91,6 +91,7 @@ def store_analytics_data( guildId=guild_id, community_id=community_id, ) + print(queries_list[0]) self.run_operations_transaction( guildId=guild_id, queries_list=queries_list, diff --git a/discord_analyzer/DB_operations/network_graph.py b/discord_analyzer/DB_operations/network_graph.py index cf131c1..466d653 100644 --- a/discord_analyzer/DB_operations/network_graph.py +++ b/discord_analyzer/DB_operations/network_graph.py @@ -221,7 +221,7 @@ def create_network_query( parameters = { "node_acc_name": node_acc_name, - date_now_timestamp: int(date_now_timestamp), + "date_now_timestamp": int(date_now_timestamp), } query_str = node_str_query + ";" diff --git a/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py b/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py index 14c76b2..bfd8956 100644 --- a/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py +++ b/discord_analyzer/analysis/neo4j_analysis/analyzer_node_stats.py @@ -8,7 +8,7 @@ class NodeStats: - def __init__(self, neo4j_ops: Neo4jOps, threshold: int = 2) -> None: + def __init__(self, threshold: int = 2) -> None: """ initialize the Node status computations object the status could be either one of `Sender`, `Receiver`, `Balanced` @@ -27,12 +27,13 @@ def __init__(self, neo4j_ops: Neo4jOps, threshold: int = 2) -> None: - else it is balanced """ + neo4j_ops = Neo4jOps.get_instance() self.gds = neo4j_ops.gds self.driver = neo4j_ops.neo4j_driver self.threshold = threshold def compute_stats(self, guildId: str, from_start: bool) -> None: - projection_utils = ProjectionUtils(gds=self.gds, guildId=guildId) + projection_utils = ProjectionUtils(guildId=guildId) # possible dates to do the computations possible_dates = projection_utils.get_dates(guildId=guildId) @@ -80,41 +81,46 @@ def compute_node_stats_wrapper( date=date, ) natural_dc = self.gds.run_cypher( - f""" + """ CALL gds.degree.stream( - '{graph_name}', - {{ + $graph_name, + { relationshipWeightProperty: 'weight' - }} + } ) YIELD nodeId, score RETURN gds.util.asNode(nodeId).userId AS userId, score - """ + """, + { + "graph_name": graph_name, + }, ) reverse_dc = self.gds.run_cypher( - f""" + """ CALL gds.degree.stream( - '{graph_name}', - {{ + $graph_name, + { orientation: 'REVERSE', relationshipWeightProperty: 'weight' - }} + } ) YIELD nodeId, score RETURN gds.util.asNode(nodeId).userId AS userId, score - """ + """, + { + "graph_name": graph_name, + }, ) df = self.get_date_stats(natural_dc, reverse_dc, threshold=self.threshold) self.save_properties_db(guildId, df, date) _ = self.gds.run_cypher( - f""" - CALL gds.graph.drop( - "{graph_name}" - ) - """ + "CALL gds.graph.drop($graph_name)", + { + "graph_name": graph_name, + }, ) def get_computed_dates( diff --git a/discord_analyzer/analysis/neo4j_analysis/centrality.py b/discord_analyzer/analysis/neo4j_analysis/centrality.py index 5bf03b1..103897d 100644 --- a/discord_analyzer/analysis/neo4j_analysis/centrality.py +++ b/discord_analyzer/analysis/neo4j_analysis/centrality.py @@ -2,17 +2,17 @@ from typing import Literal import pandas as pd -from discord_analyzer.analysis.neo4j_metrics import Neo4JMetrics +from discord_analyzer.analysis.neo4j_utils.neo4j_metrics import Neo4JMetrics from 
discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils from tc_neo4j_lib.neo4j_ops import Neo4jOps, Query class Centerality: - def __init__(self, neo4j_ops: Neo4jOps) -> None: + def __init__(self) -> None: """ centerality algorithms """ - self.neo4j_ops = neo4j_ops + self.neo4j_ops = Neo4jOps.get_instance() def compute_degree_centerality( self, @@ -31,8 +31,6 @@ def compute_degree_centerality( ------------ guildId : str the user nodes of guildId - gds : GraphDataScience - the gds instance to interact with DB direction : str the direction of relation could be `in_degree`, `out_degree`, `undirected` @@ -98,18 +96,19 @@ def compute_degree_centerality( results = self.neo4j_ops.gds.run_cypher( f""" {query} - WHERE r.guildId = '{guildId}' + WHERE r.guildId = $guild_id RETURN a.userId as a_userId, r.date as date, r.weight as weight, b.userId as b_userId - """ + """, + params={"guild_id": guildId}, ) dates_to_compute = set(results["date"].value_counts().index) if not from_start: - projection_utils = ProjectionUtils(gds=self.neo4j_ops.gds, guildId=guildId) + projection_utils = ProjectionUtils(guildId=guildId) dates_to_compute = self._get_dates_to_compute( projection_utils, dates_to_compute, guildId @@ -317,7 +316,7 @@ def compute_network_decentrality( from_start=from_start, ) - neo4j_metrics = Neo4JMetrics(self.neo4j_ops.gds) + neo4j_metrics = Neo4JMetrics() # saving each date network decentrality network_decentrality: dict[float, float | Literal[-1]] = {} @@ -355,7 +354,11 @@ def save_decentralization_score( MERGE (g) -[r:HAVE_METRICS {date: $date}]-> (g) SET r.decentralizationScore = $score """ - parameters = {"guild_id": guildId, "score": decentrality_score[date]} + parameters = { + "guild_id": guildId, + "score": decentrality_score[date], + "date": date, + } query = Query(query=query_str, parameters=parameters) queries.append(query) diff --git a/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py b/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py index 49fabfd..9fe309d 100644 --- a/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py +++ b/discord_analyzer/analysis/neo4j_analysis/local_clustering_coefficient.py @@ -2,12 +2,12 @@ from uuid import uuid1 from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils -from graphdatascience import GraphDataScience +from tc_neo4j_lib import Neo4jOps class LocalClusteringCoeff: - def __init__(self, gds: GraphDataScience) -> None: - self.gds = gds + def __init__(self) -> None: + self.gds = Neo4jOps.get_instance().gds def compute(self, guildId: str, from_start: bool = False) -> None: """ @@ -17,14 +17,8 @@ def compute(self, guildId: str, from_start: bool = False) -> None: Parameters: ------------ - gds : GraphDataScience - the python GraphDataScience instance - neo4j_analytics : Neo4JMetrics object - our written Neo4JMetrics class instance - use_names : bool - whether to add user names to results - if True, the userId will be added alongside nodeId in output - default is False + guildId : str + the guild to compute the analytics for from_start : bool whether to compute the metric from the first day or not if True, then would compute from start @@ -34,7 +28,7 @@ def compute(self, guildId: str, from_start: bool = False) -> None: --------- `None` """ - projection_utils = ProjectionUtils(gds=self.gds, guildId=guildId) + projection_utils = ProjectionUtils(guildId=guildId) # Getting all possible dates computable_dates = 
projection_utils.get_dates(guildId=guildId) @@ -93,9 +87,12 @@ def local_clustering_computation_wrapper( # dropping the computed date _ = self.gds.run_cypher( - f""" - CALL gds.graph.drop("{graph_projected_name}") """ + CALL gds.graph.drop($graph_projected_name) + """, + { + "graph_projected_name": graph_projected_name, + }, ) def get_computed_dates( @@ -145,17 +142,22 @@ def compute_graph_lcc(self, date: float, graph_name: str, guildId: str) -> None: msg = f"GUILDID: {guildId}" try: _ = self.gds.run_cypher( - f""" + """ CALL gds.localClusteringCoefficient.stream( - "{graph_name}" + $graph_name ) YIELD nodeId, localClusteringCoefficient WITH gds.util.asNode(nodeId) as userNode, localClusteringCoefficient - MATCH (g:Guild {{guildId: '{guildId}'}}) - MERGE (userNode) -[r:INTERACTED_IN {{date: {date}}}]-> (g) + MATCH (g:Guild {guildId: $guild_id}) + MERGE (userNode) -[r:INTERACTED_IN {date: $date}]-> (g) SET r.localClusteringCoefficient = localClusteringCoefficient - """ + """, + { + "graph_name": graph_name, + "guild_id": guildId, + "date": date, + }, ) except Exception as exp: logging.error(f"{msg} error in computing localClusteringCoefficient, {exp}") diff --git a/discord_analyzer/analysis/neo4j_analysis/louvain.py b/discord_analyzer/analysis/neo4j_analysis/louvain.py index 94c8073..ada52ff 100644 --- a/discord_analyzer/analysis/neo4j_analysis/louvain.py +++ b/discord_analyzer/analysis/neo4j_analysis/louvain.py @@ -80,9 +80,12 @@ def louvain_computation_wrapper( # dropping the computed date _ = self.neo4j_ops.gds.run_cypher( - f""" - CALL gds.graph.drop("{graph_projected_name}") """ + CALL gds.graph.drop($graph_projected_name) + """, + { + "graph_projected_name": graph_projected_name, + }, ) def get_computed_dates( @@ -144,9 +147,11 @@ def compute_graph_louvain( }]-> (g) SET r.louvainModularityScore = modularity """, - graph_name=graph_name, - guild_id=guild_id, - date=date, + { + "graph_name": graph_name, + "guild_id": guild_id, + "date": date, + }, ) except Exception as exp: logging.error( diff --git a/discord_analyzer/analysis/neo4j_metrics.py b/discord_analyzer/analysis/neo4j_metrics.py deleted file mode 100644 index 8750476..0000000 --- a/discord_analyzer/analysis/neo4j_metrics.py +++ /dev/null @@ -1,242 +0,0 @@ -import os - -from discord_analyzer.analysis.neo4j_utils.compute_metrics import Neo4JMetrics -from dotenv import load_dotenv -from tc_neo4j_lib.neo4j_ops import Neo4jOps - - -def degree_centrality( - gds, - neo4j_analytics, - use_names=False, - drop_projection=True, - method="stream", - node="DiscordAccount", - relationship="INTERACTED", - relationship_orientation="NATURAL", - parallel_relationship=False, -): - """ - a sample function to show how to compute DegreeCenterality using neo4j_ops - Note: this function does not assume the relation over time - - - Parameters: - ------------ - gds : GraphDataScience - the python GraphDataScience instance - neo4j_analytics : Neo4JMetrics object - our written Neo4JMetrics class instance - use_names : bool - whether to add user names to results - if True, the userId will be added alongside nodeId in output - default is False - drop_projection : bool - drop the graph projection - default is True, which means the graph projections - will be dropped after metric computation - **Note:** Must drop the projection to be able to update results, - make it False if you want do something experimental. 
- method : str - whether `stream`, `stats`, `Mutate`, or `write`, default is `stream` - each has a special effect on the database, - see: https://neo4j.com/docs/graph-data-science/current/graph-catalog-node-ops/ - node : str - the node name we're computing the degree centrality for - NOTE: Important to have the node exactly like it is saved in DB. - relationship : str - the relationship name we're computing the degree centrality for - relationship_orientation : str - the relationship orientation to be assumed - either `NATURAL`, `REVERSE`, or `UNDIRECTED` - parallel_relationship : bool - whether to assume parallel relationship as one or the real count - if False, then for relationship like A -> B - and B->A the degree centrality of A and B will be 2 - else the degree centrality of A and B will be 1 - - Returns: - --------- - results : pandas dataframe - the results of metrics in pandas dataframe format - """ - - if relationship_orientation not in ["NATURAL", "REVERSE", "UNDIRECTED"]: - msg_prefix = "Wrong relationship orientation given" - msg_prefix += "should be either `NATURAL`, `REVERSE`, or `UNDIRECTED`!" - raise ValueError(f"{msg_prefix} Entered: {relationship_orientation}") - - # compute the total weight of each INTERACTED relationship - gds.run_cypher( - """MATCH (a:DiscordAccount) -[r:INTERACTED]-(:DiscordAccount) - SET r.total_weight= REDUCE(total=0, weight in r.weights | total + weight);""" - ) - - # make the relationship projection configs - relationship_projection = {} - - if parallel_relationship: - relationship_projection[f"{relationship}"] = { - "properties": {"total_weight": {"aggregation": "SUM"}}, - "orientation": f"{relationship_orientation}", - } - else: - relationship_projection[f"{relationship}"] = { - "orientation": f"{relationship_orientation}", - "properties": ["total_weight"], - } - - # first we have to apply the projection (will be saved in server memory) - G, _ = gds.graph.project("MyGraph", node, relationship_projection) - - configuration = None - if method == "write": - configuration = {"relationshipWeightProperty": "total_weight"} - - # get the results as pandas dataframe - results = neo4j_analytics.compute_degreeCenterality( - G, method=method, configuration=configuration - ) - - if use_names: - results["userId"] = results["nodeId"].apply( - lambda nodeId: dict(gds.util.asNode(nodeId))["userId"] - ) - - if drop_projection: - _ = gds.graph.drop(G) - - return results - - -def decenterialization_score(neo4j_analytics, centrality_scores): - """ - a sample function to show how the network decentrality can be computed - - Parameters: - ------------ - neo4j_analytics : Neo4JMetrics object - our written Neo4JMetrics class instance - centrality_scores : array - array of user centrality scores - - Returns: - --------- - network_decentrality : float - the decentrality score of network - """ - network_decentrality = neo4j_analytics.compute_decentralization(centrality_scores) - - return network_decentrality - - -if __name__ == "__main__": - load_dotenv() - - protocol = os.getenv("NEO4J_PROTOCOL") - host = os.getenv("NEO4J_HOST") - port = os.getenv("NEO4J_PORT") - db_name = os.getenv("NEO4J_DB") - - url = f"{protocol}://{host}:{port}" - - user, password = (os.getenv("NEO4J_USER"), os.getenv("NEO4J_PASSWORD")) - - neo4j_ops = Neo4jOps() - neo4j_ops.set_neo4j_db_info(db_name, url, user, password) - neo4j_ops.neo4j_database_connect() - - gds = neo4j_ops.gds - - neo4j_analytics = Neo4JMetrics(gds) - - results_degreeCenterality = degree_centrality( - gds, - 
neo4j_analytics=neo4j_analytics, - use_names=True, - drop_projection=True, - method="stream", - node="DiscordAccount", - relationship="INTERACTED", - relationship_orientation="UNDIRECTED", - parallel_relationship=True, - ) - - # finding the output relationship counts from a node - results_degreeCentrality_OUT = degree_centrality( - gds, - neo4j_analytics=neo4j_analytics, - use_names=True, - drop_projection=True, - method="stream", - node="DiscordAccount", - relationship="INTERACTED", - relationship_orientation="NATURAL", - # parallel_relationship = True - ) - # finding the input relationship counts to a node - results_degreeCentrality_IN = degree_centrality( - gds, - neo4j_analytics=neo4j_analytics, - use_names=True, - drop_projection=True, - method="stream", - node="DiscordAccount", - relationship="INTERACTED", - relationship_orientation="REVERSE", - # parallel_relationship = True - ) - - # what guilds to find isolated nodes - guildId_arr = ["123456789101112", "993163081939165234", "1012430565959553145"] - results_isolated_discordNodes = neo4j_analytics.compute_isolated_nodes( - guildId=guildId_arr - ) - results_isolation_fraction = neo4j_analytics.compute_isolated_nodes_fraction( - guildId=guildId_arr - ) - results_network_density = neo4j_analytics.compute_network_density( - guildId=guildId_arr - ) - - # adding the scores in and scores out - # to pandas dataframe of `results_degreeCenterality` - results_degreeCenterality["score_in"] = results_degreeCentrality_IN["score"] - results_degreeCenterality["score_out"] = results_degreeCentrality_OUT["score"] - results_degreeCenterality["score_undirected"] = results_degreeCenterality["score"] - - # normalizing undirected scores - results_degreeCenterality["normalized_score_undirected"] = ( - results_degreeCenterality["score"] - / sum(results_degreeCenterality["score"].values > 0) - ) - # the normalization over positive score_out - results_degreeCenterality["normalized_score_out"] = results_degreeCenterality[ - "score_out" - ] / sum(results_degreeCenterality["score_out"].values > 0) - # the normalization over positive score_in - results_degreeCenterality["normalized_score_in"] = results_degreeCenterality[ - "score_in" - ] / sum(results_degreeCenterality["score_in"].values > 0) - - results_decentralityScore = decenterialization_score( - neo4j_analytics=neo4j_analytics, - centrality_scores=results_degreeCenterality[ - "normalized_score_undirected" - ].values, - ) - - print("------------------ Degree Centerality ------------------") - print(results_degreeCenterality, "\n") - - print("------------------ Network Decentrality Score ------------------") - print(results_decentralityScore, "\n") - - print("------------------ Isolated Nodes ------------------") - print(f"Isolated Nodes in guilds: {guildId_arr}") - print(results_isolated_discordNodes, "\n") - print("Isolation fraction: ", results_isolation_fraction, "\n") - - print("------------------ Network Density ------------------") - print(f"Network Density for guilds: {guildId_arr}") - print(results_network_density) diff --git a/discord_analyzer/analysis/neo4j_utils/compute_metrics.py b/discord_analyzer/analysis/neo4j_utils/neo4j_metrics.py similarity index 98% rename from discord_analyzer/analysis/neo4j_utils/compute_metrics.py rename to discord_analyzer/analysis/neo4j_utils/neo4j_metrics.py index 0dea17b..5d2ea89 100644 --- a/discord_analyzer/analysis/neo4j_utils/compute_metrics.py +++ b/discord_analyzer/analysis/neo4j_utils/neo4j_metrics.py @@ -2,11 +2,11 @@ from typing import Literal import numpy 
as np -from graphdatascience import GraphDataScience +from tc_neo4j_lib.neo4j_ops import Neo4jOps class Neo4JMetrics: - def __init__(self, gds: GraphDataScience) -> None: + def __init__(self) -> None: """ computation of Neo4J metrics @@ -15,7 +15,7 @@ def __init__(self, gds: GraphDataScience) -> None: gds : GraphDataScience the GraphDataScience instance to query the DB """ - self.gds = gds + self.gds = Neo4jOps.get_instance().gds def compute_degreeCenterality(self, graphProjection, method, configuration=None): """ diff --git a/discord_analyzer/analyzer/neo4j_analytics.py b/discord_analyzer/analyzer/neo4j_analytics.py index 3063662..c2b0540 100644 --- a/discord_analyzer/analyzer/neo4j_analytics.py +++ b/discord_analyzer/analyzer/neo4j_analytics.py @@ -61,7 +61,7 @@ def compute_local_clustering_coefficient( try: # Local Clustering Coefficient logging.info(f"{msg} Computing LocalClusteringCoefficient") - lcc = LocalClusteringCoeff(gds=self.neo4j_ops.gds) + lcc = LocalClusteringCoeff() lcc.compute(guildId=guildId, from_start=from_start) except Exception as exp: logging.error( diff --git a/discord_analyzer/analyzer/utils/analyzer_db_manager.py b/discord_analyzer/analyzer/utils/analyzer_db_manager.py index 53abee6..443a2db 100644 --- a/discord_analyzer/analyzer/utils/analyzer_db_manager.py +++ b/discord_analyzer/analyzer/utils/analyzer_db_manager.py @@ -27,29 +27,6 @@ def set_mongo_database_info( self.connection_str = f"mongodb://{self.mongo_user}:{self.mongo_pass}@{self.mongo_host}:{self.mongo_port}" - def set_neo4j_database_info(self, neo4j_creds: dict[str, Any]): - """ - set neo4J database informations - - Parameters: - ------------- - neo4j_creds : dict[str, Any] - neo4j_credentials to connect - the keys should be - - db_name: str - - protocol: str - - host: str - - port: int - - user: str - - password: str - """ - self.neo4j_db_name = neo4j_creds["db_name"] - self.neo4j_protocol = neo4j_creds["protocol"] - self.neo4j_host = neo4j_creds["host"] - self.neo4j_port = neo4j_creds["port"] - self.neo4j_user = neo4j_creds["user"] - self.neo4j_password = neo4j_creds["password"] - def database_connect(self): """ Connect to the database diff --git a/discord_analyzer/rn_analyzer.py b/discord_analyzer/rn_analyzer.py index 9a76629..82a6827 100644 --- a/discord_analyzer/rn_analyzer.py +++ b/discord_analyzer/rn_analyzer.py @@ -22,17 +22,11 @@ def __init__(self, guild_id: str, testing=False): logging.getLogger().setLevel(logging.INFO) self.testing = testing + self.neo4j_analytics = Neo4JAnalytics() self.guild_object = Guild(guild_id) self.guild_id = guild_id self.community_id = self.guild_object.get_community_id() - def setup_neo4j_metrics(self) -> None: - """ - setup the neo4j analytics wrapper - """ - - self.neo4j_analytics = Neo4JAnalytics() - def run_once(self): """Run analysis once (Wrapper)""" # check if the guild was available diff --git a/tests/integration/test_assess_engagement_mention.py b/tests/integration/test_assess_engagement_mention.py index e97c35c..7c7ca3e 100644 --- a/tests/integration/test_assess_engagement_mention.py +++ b/tests/integration/test_assess_engagement_mention.py @@ -5,7 +5,7 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity -from utils.credentials import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials from .utils.analyzer_setup import launch_db_access from 
.utils.remove_and_setup_guild import setup_db_guild @@ -26,8 +26,6 @@ def create_db_connections(self): mongo_db_host=mongo_creds["host"], mongo_db_port=mongo_creds["port"], ) - neo4j_creds = get_neo4j_credentials() - base_analyzer.set_neo4j_database_info(neo4j_creds) base_analyzer.database_connect() self.db_connections = base_analyzer.DB_connections diff --git a/tests/integration/test_assess_engagement_reactions.py b/tests/integration/test_assess_engagement_reactions.py index cc79e92..318ca3d 100644 --- a/tests/integration/test_assess_engagement_reactions.py +++ b/tests/integration/test_assess_engagement_reactions.py @@ -5,7 +5,7 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity -from utils.credentials import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials from .utils.analyzer_setup import launch_db_access from .utils.remove_and_setup_guild import setup_db_guild @@ -26,8 +26,6 @@ def create_db_connections(self): mongo_db_host=mongo_creds["host"], mongo_db_port=mongo_creds["port"], ) - neo4j_creds = get_neo4j_credentials() - base_analyzer.set_neo4j_database_info(neo4j_creds) base_analyzer.database_connect() self.db_connections = base_analyzer.DB_connections diff --git a/tests/integration/test_assess_engagement_replies.py b/tests/integration/test_assess_engagement_replies.py index d52c00a..c971085 100644 --- a/tests/integration/test_assess_engagement_replies.py +++ b/tests/integration/test_assess_engagement_replies.py @@ -5,7 +5,7 @@ from discord_analyzer.analyzer.analyzer_heatmaps import Heatmaps from discord_analyzer.analyzer.utils.analyzer_db_manager import AnalyzerDBManager from tc_core_analyzer_lib.utils.activity import DiscordActivity -from utils.credentials import get_mongo_credentials, get_neo4j_credentials +from utils.credentials import get_mongo_credentials from .utils.analyzer_setup import launch_db_access from .utils.remove_and_setup_guild import setup_db_guild @@ -26,8 +26,6 @@ def create_db_connections(self): mongo_db_host=mongo_creds["host"], mongo_db_port=mongo_creds["port"], ) - neo4j_creds = get_neo4j_credentials() - base_analyzer.set_neo4j_database_info(neo4j_creds) base_analyzer.database_connect() self.db_connections = base_analyzer.DB_connections diff --git a/tests/integration/test_decentralization_score.py b/tests/integration/test_decentralization_score.py index 25b4732..9a49f55 100644 --- a/tests/integration/test_decentralization_score.py +++ b/tests/integration/test_decentralization_score.py @@ -14,7 +14,7 @@ def test_decentralization_score(): guildId = "1234" neo4j_ops = Neo4jOps.get_instance() - centrality = Centerality(neo4j_ops) + centrality = Centerality() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_degree_centrality_multiple_guilds.py b/tests/integration/test_degree_centrality_multiple_guilds.py index 141b93b..e819ae0 100644 --- a/tests/integration/test_degree_centrality_multiple_guilds.py +++ b/tests/integration/test_degree_centrality_multiple_guilds.py @@ -75,7 +75,7 @@ def test_multiple_guilds(): SET r14.guildId = '{guildId2}' """ ) - centrality = Centerality(neo4j_ops) + centrality = Centerality() degree_centrality = centrality.compute_degree_centerality( guildId=guildId2, direction="undirected", diff --git 
a/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py b/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py index 0434914..1955e7f 100644 --- a/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py +++ b/tests/integration/test_degree_centrality_multiple_guilds_preserve_parallel.py @@ -17,7 +17,7 @@ def test_multiple_guilds_preserve_parallel(): guildId = "1234" neo4j_ops = Neo4jOps.get_instance() - centrality = Centerality(neo4j_ops) + centrality = Centerality() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_degree_centrality_parallel_preservation.py b/tests/integration/test_degree_centrality_parallel_preservation.py index 1c3e0e9..b43e09b 100644 --- a/tests/integration/test_degree_centrality_parallel_preservation.py +++ b/tests/integration/test_degree_centrality_parallel_preservation.py @@ -63,7 +63,7 @@ def test_partially_connected_coeffs(): SET r12.guildId = '{guildId}' """ ) - centrality = Centerality(neo4j_ops) + centrality = Centerality() degree_centrality = centrality.compute_degree_centerality( guildId=guildId, direction="undirected", diff --git a/tests/integration/test_fragmentation_score.py b/tests/integration/test_fragmentation_score.py index 1708e68..2957c91 100644 --- a/tests/integration/test_fragmentation_score.py +++ b/tests/integration/test_fragmentation_score.py @@ -10,7 +10,7 @@ def test_avg_clustering_coeff(): """ neo4j_ops = Neo4jOps.get_instance() - neo4j_analytics = Neo4JAnalytics(neo4j_ops) + neo4j_analytics = Neo4JAnalytics() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_fragmentation_score_exclude_past.py b/tests/integration/test_fragmentation_score_exclude_past.py index 40ba8f9..0f03a29 100644 --- a/tests/integration/test_fragmentation_score_exclude_past.py +++ b/tests/integration/test_fragmentation_score_exclude_past.py @@ -10,7 +10,7 @@ def test_avg_clustering_exclude_past(): """ neo4j_ops = Neo4jOps.get_instance() - neo4j_analytics = Neo4JAnalytics(neo4j_ops) + neo4j_analytics = Neo4JAnalytics() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_fragmentation_score_from_start.py b/tests/integration/test_fragmentation_score_from_start.py index 404bce0..1f11ca8 100644 --- a/tests/integration/test_fragmentation_score_from_start.py +++ b/tests/integration/test_fragmentation_score_from_start.py @@ -10,7 +10,7 @@ def test_avg_clustering_coeff_from_start(): """ neo4j_ops = Neo4jOps.get_instance() - neo4j_analytics = Neo4JAnalytics(neo4j_ops) + neo4j_analytics = Neo4JAnalytics() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_fragmentation_score_rescaling.py b/tests/integration/test_fragmentation_score_rescaling.py index 607df8f..020f6c6 100644 --- a/tests/integration/test_fragmentation_score_rescaling.py +++ b/tests/integration/test_fragmentation_score_rescaling.py @@ -10,7 +10,7 @@ def test_avg_clustering_coeff_scaling(): """ neo4j_ops = Neo4jOps.get_instance() - neo4j_analytics = Neo4JAnalytics(neo4j_ops) + neo4j_analytics = Neo4JAnalytics() # deleting all data neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_interacted_in_deletion.py b/tests/integration/test_interacted_in_deletion.py index df09d1f..9afb481 100644 --- a/tests/integration/test_interacted_in_deletion.py +++ 
b/tests/integration/test_interacted_in_deletion.py @@ -9,7 +9,7 @@ def test_interacted_in_deletion(): """ neo4j_ops = Neo4jOps.get_instance() - neo4j_analytics = Neo4JAnalytics(neo4j_ops) + neo4j_analytics = Neo4JAnalytics() neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") diff --git a/tests/integration/test_lcc_all_connected.py b/tests/integration/test_lcc_all_connected.py index 0955f91..5805944 100644 --- a/tests/integration/test_lcc_all_connected.py +++ b/tests/integration/test_lcc_all_connected.py @@ -44,7 +44,7 @@ def test_all_connected_coeffs(): SET r4.guildId = '{guildId}' """ ) - lcc = LocalClusteringCoeff(gds=neo4j_ops.gds) + lcc = LocalClusteringCoeff() lcc.compute(guildId=guildId, from_start=True) # getting the results diff --git a/tests/integration/test_lcc_partially_connected.py b/tests/integration/test_lcc_partially_connected.py index 5cd81e7..11da6be 100644 --- a/tests/integration/test_lcc_partially_connected.py +++ b/tests/integration/test_lcc_partially_connected.py @@ -63,7 +63,7 @@ def test_partially_connected_coeffs(): SET r12.guildId = '{guildId}' """ ) - lcc = LocalClusteringCoeff(gds=neo4j_ops.gds) + lcc = LocalClusteringCoeff() lcc.compute(guildId=guildId) # getting the results diff --git a/tests/integration/test_louvain_algorithm_computation.py b/tests/integration/test_louvain_algorithm_computation.py index 94141d3..c8018bb 100644 --- a/tests/integration/test_louvain_algorithm_computation.py +++ b/tests/integration/test_louvain_algorithm_computation.py @@ -35,7 +35,7 @@ def test_louvain_algorithm_available_data(): SET r4.guildId = '{guild_id}' """ ) - louvain = Louvain(neo4j_ops) + louvain = Louvain() louvain.compute(guild_id=guild_id, from_start=False) @@ -104,7 +104,7 @@ def test_louvain_algorithm_more_available_data(): SET r12.guildId = '{guild_id}' """ ) - louvain = Louvain(neo4j_ops) + louvain = Louvain() louvain.compute(guild_id=guild_id, from_start=False) diff --git a/tests/integration/test_louvain_algorithm_get_computed_dates.py b/tests/integration/test_louvain_algorithm_get_computed_dates.py index c1c6cbe..8e251bf 100644 --- a/tests/integration/test_louvain_algorithm_get_computed_dates.py +++ b/tests/integration/test_louvain_algorithm_get_computed_dates.py @@ -36,10 +36,8 @@ def test_louvain_get_computed_dates_empty_data(): SET r4.guildId = '{guild_id}' """ ) - louvain = Louvain(neo4j_ops) - projection_utils = ProjectionUtils(neo4j_ops.gds, guildId=guild_id) - - projection_utils = ProjectionUtils(gds=neo4j_ops.gds, guildId=guild_id) + louvain = Louvain() + projection_utils = ProjectionUtils(guildId=guild_id) computed_dates = louvain.get_computed_dates(projection_utils, guildId=guild_id) @@ -79,10 +77,8 @@ def test_louvain_get_computed_dates_empty_data_with_have_metrics_relation(): SET r4.guildId = '{guild_id}' """ ) - louvain = Louvain(neo4j_ops) - projection_utils = ProjectionUtils(neo4j_ops.gds, guildId=guild_id) - - projection_utils = ProjectionUtils(gds=neo4j_ops.gds, guildId=guild_id) + louvain = Louvain() + projection_utils = ProjectionUtils(guildId=guild_id) computed_dates = louvain.get_computed_dates(projection_utils, guildId=guild_id) @@ -122,10 +118,8 @@ def test_louvain_get_computed_dates_one_data(): SET r4.guildId = '{guild_id}' """ ) - louvain = Louvain(neo4j_ops) - projection_utils = ProjectionUtils(neo4j_ops.gds, guildId=guild_id) - - projection_utils = ProjectionUtils(gds=neo4j_ops.gds, guildId=guild_id) + louvain = Louvain() + projection_utils = ProjectionUtils(guildId=guild_id) computed_dates = 
louvain.get_computed_dates(projection_utils, guildId=guild_id) diff --git a/tests/integration/test_neo4j_compute_metrics.py b/tests/integration/test_neo4j_compute_metrics.py index 62e4c8b..f22f0e3 100644 --- a/tests/integration/test_neo4j_compute_metrics.py +++ b/tests/integration/test_neo4j_compute_metrics.py @@ -62,7 +62,7 @@ def test_guild_results_available(): """ ) - analytics = Neo4JAnalytics(neo4j_ops) + analytics = Neo4JAnalytics() analytics.compute_metrics(guildId=guildId, from_start=False) diff --git a/tests/integration/test_neo4j_compute_metrics_from_start.py b/tests/integration/test_neo4j_compute_metrics_from_start.py index 1233232..5fb92b0 100644 --- a/tests/integration/test_neo4j_compute_metrics_from_start.py +++ b/tests/integration/test_neo4j_compute_metrics_from_start.py @@ -62,7 +62,7 @@ def test_neo4j_compute_metrics_from_start(): """ ) - analytics = Neo4JAnalytics(neo4j_ops) + analytics = Neo4JAnalytics() analytics.compute_metrics(guildId=guildId, from_start=True) diff --git a/tests/integration/test_neo4j_projection_utils_computed_dates.py b/tests/integration/test_neo4j_projection_utils_computed_dates.py index 9804859..bd5a754 100644 --- a/tests/integration/test_neo4j_projection_utils_computed_dates.py +++ b/tests/integration/test_neo4j_projection_utils_computed_dates.py @@ -1,6 +1,6 @@ from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils -# from tc_neo4j_lib.neo4j_ops import Neo4jOps +from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_neo4j_projection_utils_get_computed_dates(): diff --git a/tests/integration/test_node_stats.py b/tests/integration/test_node_stats.py index 5bda02b..5c82840 100644 --- a/tests/integration/test_node_stats.py +++ b/tests/integration/test_node_stats.py @@ -62,7 +62,7 @@ def test_node_stats(): """ ) - node_stats = NodeStats(neo4j_ops, threshold=2) + node_stats = NodeStats(threshold=2) node_stats.compute_stats(guildId="1234", from_start=True) # getting the results diff --git a/tests/integration/utils/analyzer_setup.py b/tests/integration/utils/analyzer_setup.py index f7d3d09..c634549 100644 --- a/tests/integration/utils/analyzer_setup.py +++ b/tests/integration/utils/analyzer_setup.py @@ -17,14 +17,6 @@ def setup_analyzer( host = os.getenv("MONGODB_HOST", "") port = os.getenv("MONGODB_PORT", "") - neo4j_creds = {} - neo4j_creds["db_name"] = os.getenv("NEO4J_DB", "") - neo4j_creds["protocol"] = os.getenv("NEO4J_PROTOCOL", "") - neo4j_creds["host"] = os.getenv("NEO4J_HOST", "") - neo4j_creds["port"] = os.getenv("NEO4J_PORT", "") - neo4j_creds["password"] = os.getenv("NEO4J_PASSWORD", "") - neo4j_creds["user"] = os.getenv("NEO4J_USER", "") - analyzer.set_mongo_database_info( mongo_db_host=host, mongo_db_password=password, @@ -32,9 +24,7 @@ def setup_analyzer( mongo_db_port=port, ) - analyzer.set_neo4j_database_info(neo4j_creds=neo4j_creds) analyzer.database_connect() - analyzer.setup_neo4j_metrics() return analyzer diff --git a/tests/integration/utils/mock_graph.py b/tests/integration/utils/mock_graph.py index 2473db0..27c531b 100644 --- a/tests/integration/utils/mock_graph.py +++ b/tests/integration/utils/mock_graph.py @@ -72,14 +72,6 @@ def store_mock_data_in_neo4j(graph_dict, guildId, community_id): host = os.getenv("MONGODB_HOST") port = os.getenv("MONGODB_PORT") - neo4j_creds = {} - neo4j_creds["db_name"] = os.getenv("NEO4J_DB") - neo4j_creds["protocol"] = os.getenv("NEO4J_PROTOCOL") - neo4j_creds["host"] = os.getenv("NEO4J_HOST") - neo4j_creds["port"] = os.getenv("NEO4J_PORT") - neo4j_creds["password"] = 
os.getenv("NEO4J_PASSWORD") - neo4j_creds["user"] = os.getenv("NEO4J_USER") - analyzer = RnDaoAnalyzer(guildId) analyzer.set_mongo_database_info( @@ -88,7 +80,6 @@ def store_mock_data_in_neo4j(graph_dict, guildId, community_id): mongo_db_user=user, mongo_db_port=port, ) - analyzer.set_neo4j_database_info(neo4j_creds=neo4j_creds) analyzer.database_connect() guilds_data = {} diff --git a/tests/unit/test_creds_loading.py b/tests/unit/test_creds_loading.py index 745b1c0..2d35349 100644 --- a/tests/unit/test_creds_loading.py +++ b/tests/unit/test_creds_loading.py @@ -1,6 +1,5 @@ from utils.credentials import ( get_mongo_credentials, - get_neo4j_credentials, get_rabbit_mq_credentials, get_redis_credentials, get_sentryio_service_creds, @@ -50,30 +49,6 @@ def test_rabbit_creds_values(): assert rabbit_creds["username"] is not None -def test_no4j_creds_keys(): - neo4j_creds = get_neo4j_credentials() - - credential_keys = list(neo4j_creds.keys()) - - assert "user" in credential_keys - assert "password" in credential_keys - assert "db_name" in credential_keys - assert "protocol" in credential_keys - assert "port" in credential_keys - assert "host" in credential_keys - - -def test_neo4j_creds_values(): - neo4j_creds = get_neo4j_credentials() - - assert neo4j_creds["user"] is not None - assert neo4j_creds["password"] is not None - assert neo4j_creds["protocol"] is not None - assert neo4j_creds["port"] is not None - assert neo4j_creds["db_name"] is not None - assert neo4j_creds["host"] is not None - - def test_redis_creds_keys(): redis_creds = get_redis_credentials() diff --git a/utils/credentials.py b/utils/credentials.py index 74b1d0a..3cb782c 100644 --- a/utils/credentials.py +++ b/utils/credentials.py @@ -64,34 +64,6 @@ def get_mongo_credentials(): return mongo_creds -def get_neo4j_credentials(): - """ - load neo4j credentials from .env - - Returns: - --------- - neo4j_creds : dict[str, Any] - neo4j credentials - a dictionary representive of - `user` : str - `pass` : str - `db_name` : str - `url` : str - """ - - load_dotenv() - - neo4j_creds = {} - neo4j_creds["db_name"] = os.getenv("NEO4J_DB") - neo4j_creds["protocol"] = os.getenv("NEO4J_PROTOCOL") - neo4j_creds["host"] = os.getenv("NEO4J_HOST") - neo4j_creds["port"] = os.getenv("NEO4J_PORT") - neo4j_creds["password"] = os.getenv("NEO4J_PASSWORD") - neo4j_creds["user"] = os.getenv("NEO4J_USER") - - return neo4j_creds - - def get_sentryio_service_creds(): load_dotenv() From f72629877b6e6df6768156ad80fb5a821b5fa56b Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:21:03 +0330 Subject: [PATCH 42/48] feat: increasing the neo4j backend lib version! --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 26dc353..4389aba 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,6 +23,6 @@ sentry-sdk rq redis tc-core-analyzer-lib==1.3.1 -tc-neo4j-lib==1.0.2 +tc-neo4j-lib==2.0.0 pybars3 backoff==2.2.1 From 82b7ec3a67711cebe05e68343efeda13a9be6d42 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:21:42 +0330 Subject: [PATCH 43/48] fix: wrong branch to increase the version! 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 4389aba..26dc353 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,6 +23,6 @@ sentry-sdk rq redis tc-core-analyzer-lib==1.3.1 -tc-neo4j-lib==2.0.0 +tc-neo4j-lib==1.0.2 pybars3 backoff==2.2.1 From f1ca0d68e859aa72e877e608c2a421abe08ce2ae Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:22:20 +0330 Subject: [PATCH 44/48] fix: increase neo4j lib version! --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 26dc353..4389aba 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,6 +23,6 @@ sentry-sdk rq redis tc-core-analyzer-lib==1.3.1 -tc-neo4j-lib==1.0.2 +tc-neo4j-lib==2.0.0 pybars3 backoff==2.2.1 From 4c4a2c86be4482b5a9ccff7dc2bebd480e108994 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:36:06 +0330 Subject: [PATCH 45/48] fix: adding logs to the check the status! --- worker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/worker.py b/worker.py index b2e2eb4..414e24d 100644 --- a/worker.py +++ b/worker.py @@ -20,6 +20,7 @@ def worker_exception_handler(job, exc_type, exc_value, traceback): worker = Worker( queues=["default"], connection=r, exception_handlers=worker_exception_handler ) + logging.info(f"Registered the worker!") try: worker.work(with_scheduler=True, max_jobs=1) except KeyboardInterrupt: From a83c817bc6e7fd46f254d3352908c3478b3da298 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:45:02 +0330 Subject: [PATCH 46/48] fix: lint issues! --- .../analysis/neo4j_utils/projection_utils.py | 1 + .../analyzer/utils/analyzer_db_manager.py | 2 -- ..._generated_graph_period_1_year_run_once.py | 2 +- .../test_generated_graph_period_1year.py | 2 +- .../test_generated_graph_period_35_days.py | 3 +- ...generated_graph_period_35_days_run_once.py | 3 +- .../test_generated_graph_period_3_months.py | 3 +- ...enerated_graph_period_3_months_run_once.py | 3 +- .../test_generated_graph_period_6_months.py | 3 +- ...enerated_graph_period_6_months_run_once.py | 3 +- .../test_interacted_in_deletion.py | 1 - tests/integration/test_lcc_all_connected.py | 1 - .../test_lcc_partially_connected.py | 1 - .../test_louvain_algorithm_computation.py | 1 - ...st_louvain_algorithm_get_computed_dates.py | 1 - .../integration/test_neo4j_compute_metrics.py | 1 - .../test_neo4j_compute_metrics_from_start.py | 1 - ...t_neo4j_projection_utils_computed_dates.py | 1 - .../test_network_graph_creation.py | 2 +- tests/integration/test_node_stats.py | 1 - tests/integration/utils/neo4j_conn.py | 28 ------------------- 21 files changed, 10 insertions(+), 54 deletions(-) delete mode 100644 tests/integration/utils/neo4j_conn.py diff --git a/discord_analyzer/analysis/neo4j_utils/projection_utils.py b/discord_analyzer/analysis/neo4j_utils/projection_utils.py index 3201071..b67e7c8 100644 --- a/discord_analyzer/analysis/neo4j_utils/projection_utils.py +++ b/discord_analyzer/analysis/neo4j_utils/projection_utils.py @@ -1,4 +1,5 @@ import logging + from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/discord_analyzer/analyzer/utils/analyzer_db_manager.py b/discord_analyzer/analyzer/utils/analyzer_db_manager.py index 443a2db..190d3d7 100644 --- a/discord_analyzer/analyzer/utils/analyzer_db_manager.py +++ b/discord_analyzer/analyzer/utils/analyzer_db_manager.py @@ -1,5 +1,3 @@ -from typing import Any - from discord_analyzer.DB_operations.mongo_neo4j_ops 
import MongoNeo4jDB diff --git a/tests/integration/test_generated_graph_period_1_year_run_once.py b/tests/integration/test_generated_graph_period_1_year_run_once.py index 8ba7556..31fdc2e 100644 --- a/tests/integration/test_generated_graph_period_1_year_run_once.py +++ b/tests/integration/test_generated_graph_period_1_year_run_once.py @@ -1,11 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_1year.py b/tests/integration/test_generated_graph_period_1year.py index 3404075..bfe2911 100644 --- a/tests/integration/test_generated_graph_period_1year.py +++ b/tests/integration/test_generated_graph_period_1year.py @@ -1,11 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_35_days.py b/tests/integration/test_generated_graph_period_35_days.py index 0d20b41..1878ba3 100644 --- a/tests/integration/test_generated_graph_period_35_days.py +++ b/tests/integration/test_generated_graph_period_35_days.py @@ -1,12 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data - -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_35_days_run_once.py b/tests/integration/test_generated_graph_period_35_days_run_once.py index 8e06333..ed7109e 100644 --- a/tests/integration/test_generated_graph_period_35_days_run_once.py +++ b/tests/integration/test_generated_graph_period_35_days_run_once.py @@ -1,12 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data - -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_3_months.py b/tests/integration/test_generated_graph_period_3_months.py index ddae3dc..365f55f 100644 --- a/tests/integration/test_generated_graph_period_3_months.py +++ b/tests/integration/test_generated_graph_period_3_months.py @@ -1,12 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from 
.utils.mock_memberactivities import create_empty_memberactivities_data - -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_3_months_run_once.py b/tests/integration/test_generated_graph_period_3_months_run_once.py index 9d71b55..34764fd 100644 --- a/tests/integration/test_generated_graph_period_3_months_run_once.py +++ b/tests/integration/test_generated_graph_period_3_months_run_once.py @@ -1,12 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data - -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_6_months.py b/tests/integration/test_generated_graph_period_6_months.py index 834f615..3434bda 100644 --- a/tests/integration/test_generated_graph_period_6_months.py +++ b/tests/integration/test_generated_graph_period_6_months.py @@ -1,12 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data - -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_generated_graph_period_6_months_run_once.py b/tests/integration/test_generated_graph_period_6_months_run_once.py index a655fac..4865eeb 100644 --- a/tests/integration/test_generated_graph_period_6_months_run_once.py +++ b/tests/integration/test_generated_graph_period_6_months_run_once.py @@ -1,12 +1,11 @@ from datetime import datetime, timedelta, timezone import numpy as np +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.analyzer_setup import launch_db_access, setup_analyzer from .utils.mock_heatmaps import create_empty_heatmaps_data from .utils.mock_memberactivities import create_empty_memberactivities_data - -from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.remove_and_setup_guild import setup_db_guild diff --git a/tests/integration/test_interacted_in_deletion.py b/tests/integration/test_interacted_in_deletion.py index 9afb481..04b7d49 100644 --- a/tests/integration/test_interacted_in_deletion.py +++ b/tests/integration/test_interacted_in_deletion.py @@ -1,5 +1,4 @@ from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_lcc_all_connected.py b/tests/integration/test_lcc_all_connected.py index 5805944..9c24599 100644 --- a/tests/integration/test_lcc_all_connected.py +++ b/tests/integration/test_lcc_all_connected.py @@ -2,7 +2,6 @@ from discord_analyzer.analysis.neo4j_analysis.local_clustering_coefficient import ( LocalClusteringCoeff, ) - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_lcc_partially_connected.py b/tests/integration/test_lcc_partially_connected.py index 11da6be..73e6b5b 100644 --- a/tests/integration/test_lcc_partially_connected.py +++ b/tests/integration/test_lcc_partially_connected.py @@ -2,7 +2,6 @@ from 
discord_analyzer.analysis.neo4j_analysis.local_clustering_coefficient import ( LocalClusteringCoeff, ) - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_louvain_algorithm_computation.py b/tests/integration/test_louvain_algorithm_computation.py index c8018bb..2f6bef0 100644 --- a/tests/integration/test_louvain_algorithm_computation.py +++ b/tests/integration/test_louvain_algorithm_computation.py @@ -1,5 +1,4 @@ from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_louvain_algorithm_get_computed_dates.py b/tests/integration/test_louvain_algorithm_get_computed_dates.py index 8e251bf..01114c2 100644 --- a/tests/integration/test_louvain_algorithm_get_computed_dates.py +++ b/tests/integration/test_louvain_algorithm_get_computed_dates.py @@ -1,6 +1,5 @@ from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_neo4j_compute_metrics.py b/tests/integration/test_neo4j_compute_metrics.py index f22f0e3..0cd203c 100644 --- a/tests/integration/test_neo4j_compute_metrics.py +++ b/tests/integration/test_neo4j_compute_metrics.py @@ -1,6 +1,5 @@ import numpy as np from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_neo4j_compute_metrics_from_start.py b/tests/integration/test_neo4j_compute_metrics_from_start.py index 5fb92b0..24b1faf 100644 --- a/tests/integration/test_neo4j_compute_metrics_from_start.py +++ b/tests/integration/test_neo4j_compute_metrics_from_start.py @@ -1,6 +1,5 @@ import numpy as np from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_neo4j_projection_utils_computed_dates.py b/tests/integration/test_neo4j_projection_utils_computed_dates.py index bd5a754..b359e55 100644 --- a/tests/integration/test_neo4j_projection_utils_computed_dates.py +++ b/tests/integration/test_neo4j_projection_utils_computed_dates.py @@ -1,5 +1,4 @@ from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/test_network_graph_creation.py b/tests/integration/test_network_graph_creation.py index 6464282..312404c 100644 --- a/tests/integration/test_network_graph_creation.py +++ b/tests/integration/test_network_graph_creation.py @@ -4,9 +4,9 @@ import networkx as nx import numpy as np from discord_analyzer.analysis.utils.activity import Activity +from tc_neo4j_lib.neo4j_ops import Neo4jOps from .utils.mock_graph import generate_mock_graph, store_mock_data_in_neo4j -from tc_neo4j_lib.neo4j_ops import Neo4jOps def test_network_graph_create(): diff --git a/tests/integration/test_node_stats.py b/tests/integration/test_node_stats.py index 5c82840..bcbf9cc 100644 --- a/tests/integration/test_node_stats.py +++ b/tests/integration/test_node_stats.py @@ -1,6 +1,5 @@ # test out local clustering coefficient with all nodes connected from discord_analyzer.analysis.neo4j_analysis.analyzer_node_stats import NodeStats - from tc_neo4j_lib.neo4j_ops import Neo4jOps diff --git a/tests/integration/utils/neo4j_conn.py b/tests/integration/utils/neo4j_conn.py deleted file mode 100644 index 7a7c163..0000000 --- a/tests/integration/utils/neo4j_conn.py 
+++ /dev/null @@ -1,28 +0,0 @@ -import os - -from dotenv import load_dotenv - - -def neo4j_setup() -> Neo4jOps: - load_dotenv() - - protocol = os.getenv("NEO4J_PROTOCOL") - host = os.getenv("NEO4J_HOST") - port = os.getenv("NEO4J_PORT") - db_name = os.getenv("NEO4J_DB") - - user = os.getenv("NEO4J_USER") - password = os.getenv("NEO4J_PASSWORD") - - neo4j_ops = Neo4jOps() - neo4j_ops.set_neo4j_db_info( - neo4j_db_name=db_name, - neo4j_protocol=protocol, - neo4j_user=user, - neo4j_password=password, - neo4j_host=host, - neo4j_port=port, - ) - neo4j_ops.neo4j_database_connect() - - return neo4j_ops From 9f8ee96ac266ca481f72649911e8b787bf4c770c Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 10:46:05 +0330 Subject: [PATCH 47/48] fix: lint issues! --- worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/worker.py b/worker.py index 414e24d..d756fd7 100644 --- a/worker.py +++ b/worker.py @@ -20,7 +20,7 @@ def worker_exception_handler(job, exc_type, exc_value, traceback): worker = Worker( queues=["default"], connection=r, exception_handlers=worker_exception_handler ) - logging.info(f"Registered the worker!") + logging.info("Registered the worker!") try: worker.work(with_scheduler=True, max_jobs=1) except KeyboardInterrupt: From 52ec08e124070b9910ff3570d9ff79a30f363474 Mon Sep 17 00:00:00 2001 From: Mohammad Amin Date: Thu, 23 May 2024 12:44:32 +0330 Subject: [PATCH 48/48] fix: trying to comment the decode response! --- utils/redis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/redis.py b/utils/redis.py index 6bd2ddb..c8224ed 100644 --- a/utils/redis.py +++ b/utils/redis.py @@ -35,5 +35,5 @@ def create_redis_client(self, redis_creds: dict[str, str]): host=redis_creds["host"], port=int(redis_creds["port"]), password=redis_creds["pass"], - decode_responses=True, + # decode_responses=True, )
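
Note on the migration these last patches implement: with tc-neo4j-lib 2.0.0, Neo4jOps becomes a singleton obtained via Neo4jOps.get_instance(), so the explicit Neo4j credential plumbing (get_neo4j_credentials, set_neo4j_database_info, tests/integration/utils/neo4j_conn.py) is removed and the analytics classes are constructed without a driver or gds argument. Below is a minimal sketch of the resulting call pattern, assembled only from calls that already appear in the test diffs above; the guild id is a placeholder, and the library is assumed to pick up its connection settings from the environment.

    from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain
    from discord_analyzer.analyzer.neo4j_analytics import Neo4JAnalytics
    from tc_neo4j_lib.neo4j_ops import Neo4jOps

    guild_id = "1234"  # placeholder guild id, matching the value used in the tests

    # the library owns the connection now; no credentials are passed around
    neo4j_ops = Neo4jOps.get_instance()

    # reset the test graph the same way the integration tests do
    neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)")

    # analytics classes no longer take a neo4j_ops / gds argument
    analytics = Neo4JAnalytics()
    analytics.compute_metrics(guildId=guild_id, from_start=True)

    louvain = Louvain()
    louvain.compute(guild_id=guild_id, from_start=False)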