-
-
- Technical Overview
-
+
+
+
+ Technical Overview
+
+
+
-{% endblock %}
+
+{% endblock content %}
+{% block after_js %}
+
+{% endblock after_js %}
diff --git a/website/views/project.py b/website/views/project.py
index d9be1b995..9b5682185 100644
--- a/website/views/project.py
+++ b/website/views/project.py
@@ -933,3 +933,226 @@ def get_context_data(self, **kwargs):
]
return context
+
+ def post(self, request, *args, **kwargs):
+ def get_issue_count(full_name, query, headers):
+ search_url = f"https://api.github.com/search/issues?q=repo:{full_name}+{query}"
+ resp = requests.get(search_url, headers=headers)
+ if resp.status_code == 200:
+ return resp.json().get("total_count", 0)
+ return 0
+
+ repo = self.get_object()
+ section = request.POST.get("section")
+
+ if section == "basic":
+ try:
+ # Get GitHub API token
+ github_token = getattr(settings, "GITHUB_TOKEN", None)
+ if not github_token:
+ return JsonResponse(
+ {"status": "error", "message": "GitHub token not configured"}, status=500
+ )
+
+ # Extract owner/repo from GitHub URL
+ match = re.match(r"https://github.com/([^/]+)/([^/]+)/?", repo.repo_url)
+ if not match:
+ return JsonResponse(
+ {"status": "error", "message": "Invalid repository URL"}, status=400
+ )
+
+ owner, repo_name = match.groups()
+ api_url = f"https://api.github.com/repos/{owner}/{repo_name}"
+
+ # Make GitHub API request
+ headers = {
+ "Authorization": f"token {github_token}",
+ "Accept": "application/vnd.github.v3+json",
+ }
+ response = requests.get(api_url, headers=headers)
+
+ if response.status_code == 200:
+ data = response.json()
+
+ # Update repo with fresh data
+ repo.stars = data.get("stargazers_count", 0)
+ repo.forks = data.get("forks_count", 0)
+ repo.watchers = data.get("watchers_count", 0)
+ repo.open_issues = data.get("open_issues_count", 0)
+ repo.network_count = data.get("network_count", 0)
+ repo.subscribers_count = data.get("subscribers_count", 0)
+ repo.last_updated = parse_datetime(data.get("updated_at"))
+ repo.save()
+
+ return JsonResponse(
+ {
+ "status": "success",
+ "message": "Basic information updated successfully",
+ "data": {
+ "stars": repo.stars,
+ "forks": repo.forks,
+ "watchers": repo.watchers,
+ "network_count": repo.network_count,
+ "subscribers_count": repo.subscribers_count,
+ "last_updated": naturaltime(repo.last_updated).replace(
+ "\xa0", " "
+ ), # Fix unicode space
+ },
+ }
+ )
+ else:
+ return JsonResponse(
+ {"status": "error", "message": f"GitHub API error: {response.status_code}"},
+ status=response.status_code,
+ )
+
+        except requests.HTTPError as e:
+            return JsonResponse(
+                {
+                    "status": "error",
+                    "message": "A GitHub API error occurred. Please try again later.",
+                },
+                status=e.response.status_code,
+            )
+        except requests.RequestException as e:
+            return JsonResponse(
+                {
+                    "status": "error",
+                    "message": "Network error: A network error occurred. Please try again later.",
+                },
+                status=503,
+            )
+ except ValueError as e:
+ return JsonResponse(
+ {"status": "error", "message": "There was an error processing your data."},
+ status=400,
+ )
+
+ elif section == "metrics":
+ try:
+ github_token = getattr(settings, "GITHUB_TOKEN", None)
+ if not github_token:
+ return JsonResponse(
+ {"status": "error", "message": "GitHub token not configured"}, status=500
+ )
+
+ match = re.match(r"https://github.com/([^/]+)/([^/]+)/?", repo.repo_url)
+ if not match:
+ return JsonResponse(
+ {"status": "error", "message": "Invalid repository URL"}, status=400
+ )
+
+ # Extract owner and repo from API call
+ owner, repo_name = match.groups()
+ api_url = f"https://api.github.com/repos/{owner}/{repo_name}"
+ headers = {
+ "Authorization": f"token {github_token}",
+ "Accept": "application/vnd.github.v3+json",
+ }
+ response = requests.get(api_url, headers=headers)
+
+ if response.status_code != 200:
+ return JsonResponse(
+ {"status": "error", "message": "Failed to fetch repository data"},
+ status=500,
+ )
+
+ repo_data = response.json()
+ full_name = repo_data.get("full_name")
+ default_branch = repo_data.get("default_branch")
+ if not full_name:
+ return JsonResponse(
+ {"status": "error", "message": "Could not get repository full name"},
+ status=500,
+ )
+
+ full_name = full_name.replace(" ", "+")
+
+ # get the total commit
+ url = f"https://api.github.com/repos/{full_name}/commits"
+ params = {"per_page": 1, "page": 1}
+ response = requests.get(url, headers=headers, params=params)
+ if response.status_code == 200:
+ if "Link" in response.headers:
+ links = response.headers["Link"]
+ last_page = 1
+ for link in links.split(","):
+ if 'rel="last"' in link:
+ last_page = int(link.split("&page=")[1].split(">")[0])
+ commit_count = last_page
+ else:
+ commits = response.json()
+ total_commits = len(commits)
+ commit_count = total_commits
+ else:
+ commit_count = 0
+
+ # Get open issues and PRs
+ open_issues = get_issue_count(full_name, "type:issue+state:open", headers)
+ closed_issues = get_issue_count(full_name, "type:issue+state:closed", headers)
+ open_pull_requests = get_issue_count(full_name, "type:pr+state:open", headers)
+ total_issues = open_issues + closed_issues
+
+ if (
+ repo.open_issues != open_issues
+ or repo.closed_issues != closed_issues
+ or repo.total_issues != total_issues
+ or repo.open_pull_requests != open_pull_requests
+ or repo.commit_count != commit_count
+ ):
+ # Update repository metrics
+ repo.open_issues = open_issues
+ repo.closed_issues = closed_issues
+ repo.total_issues = total_issues
+ repo.open_pull_requests = open_pull_requests
+ repo.commit_count = commit_count
+
+ commits_url = f"{api_url}/commits?sha={default_branch}&per_page=1"
+ commits_response = requests.get(commits_url, headers=headers)
+ if commits_response.status_code == 200:
+ commit_data = commits_response.json()
+ if commit_data:
+ date_str = commit_data[0]["commit"]["committer"]["date"]
+ repo.last_commit_date = parse_datetime(date_str)
+ repo.save()
+
+ return JsonResponse(
+ {
+ "status": "success",
+ "message": "Activity metrics updated successfully",
+ "data": {
+ "open_issues": repo.open_issues,
+ "closed_issues": repo.closed_issues,
+ "total_issues": repo.total_issues,
+ "open_pull_requests": repo.open_pull_requests,
+ "commit_count": repo.commit_count,
+ "last_commit_date": repo.last_commit_date.strftime("%b %d, %Y")
+ if repo.last_commit_date
+ else "",
+ },
+ }
+ )
+
+        except requests.HTTPError as e:
+            return JsonResponse(
+                {
+                    "status": "error",
+                    "message": "A GitHub API error occurred. Please try again later.",
+                },
+                status=e.response.status_code,
+            )
+        except requests.RequestException as e:
+            return JsonResponse(
+                {
+                    "status": "error",
+                    "message": "Network error: A network error occurred. Please try again later.",
+                },
+                status=503,
+            )
+ except ValueError as e:
+ return JsonResponse(
+ {"status": "error", "message": "There was an error processing your data."},
+ status=400,
+ )
+
+ return super().post(request, *args, **kwargs)
From 23ad685880ff5f8b3170e9e33d248b19d8a6d7cd Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 11:26:08 -0500
Subject: [PATCH 29/52] Add scout-apm dependency for application performance
monitoring
---
poetry.lock | 80 ++++++++++++++++++++++++++++++++++++++++++++++++++++-
1 file changed, 79 insertions(+), 1 deletion(-)
diff --git a/poetry.lock b/poetry.lock
index 52a52ffab..416652472 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -3980,6 +3980,84 @@ files = [
{file = "ruff-0.8.4.tar.gz", hash = "sha256:0d5f89f254836799af1615798caa5f80b7f935d7a670fad66c5007928e57ace8"},
]
+[[package]]
+name = "scout-apm"
+version = "3.2.1"
+description = "Scout Application Performance Monitoring Agent"
+optional = false
+python-versions = "<4,>=3.8"
+files = [
+ {file = "scout_apm-3.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f2b13d508f5d5dc590ea392066acd366696370a9866d50217bb6fbff385cf5b"},
+ {file = "scout_apm-3.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:386f6caa3db81211a7f197322da041c87d9623ee0eedf724b79ffd34867a7b22"},
+ {file = "scout_apm-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fff3c9d05a099ab3c1c8390e8b9079aa7ee58a48793ba561163b14d228adc6d3"},
+ {file = "scout_apm-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9757f4e2118385b7f91ee29cc15a55b5ae807044d9f035b1f066aeeb5ef9e784"},
+ {file = "scout_apm-3.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc81d7b65cd291ab022a32f3e5b0fa06b7108ced73f99bec712d7a7a973e385"},
+ {file = "scout_apm-3.2.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a89c5e619b5505eb811a3aab6c0d2ea7eb117f503a234253b8a4a4f678f5216"},
+ {file = "scout_apm-3.2.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2135c6e64813b424f38263f622f009a8f61555017aba235779d9088cc8c75e64"},
+ {file = "scout_apm-3.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5ce0e8a60e9f3961272116fab862b0c61fbc73721281e061b352b376ad72441d"},
+ {file = "scout_apm-3.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5b5ef6fca89655eb7789d5a57857019f945a2cf96fffc955b76857386a6e8d98"},
+ {file = "scout_apm-3.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6076a58b5390d93ab2e5ffe5f9ce72653123f4cde8cd0a7a320d5e617c5ae2c6"},
+ {file = "scout_apm-3.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78e480dae8ca233c563885f63743fcbf4551ccc5fd3d573e18442def6b985cdb"},
+ {file = "scout_apm-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6cb7e9a37c513b85317f99d72d10dc6ef044df441496ae0fc576762931cf60e4"},
+ {file = "scout_apm-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbdfa1b8cda52fa87d787fc76a96a81e8da9872a6ad07d52631497c87bb44dd0"},
+ {file = "scout_apm-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf769369d6cfe03aa72762662cc2e07a3a71425675bb605ffae2f8b68acc9f2c"},
+ {file = "scout_apm-3.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7cd5ea92735e1651e8a5396493c3c6122268ed27bd6e6208e69386558befac1"},
+ {file = "scout_apm-3.2.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a6b2d5f1e5a34cf6928ea04680125026d366f1ebb25cb9d722f553c51d389c"},
+ {file = "scout_apm-3.2.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e59e9ea77af9ad23a9d09082326015a52be00e13fdc38aa291543d030b1bced"},
+ {file = "scout_apm-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:13c1cfb241b78b57c36a3dcfdd69567137ce4fb61cdfb3279dac926cd8028cde"},
+ {file = "scout_apm-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:337612cd915d11a6b8cc302edf8ee0da881fa00194a0c1b291ab07a4ef52d280"},
+ {file = "scout_apm-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29bc0b7340b4775bb30a6d34f935130588d68483fec164c1de6d53ab64071542"},
+ {file = "scout_apm-3.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9ce6aeb810acfe5ecc7f485257e1b19705db074e5c405c2f72e38efa64e59d4c"},
+ {file = "scout_apm-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4f6b08defc72a3dd57791d02f9741be97e7b353c965f0f922360ec58e3ea6200"},
+ {file = "scout_apm-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f35a5536c1eb32f6ea7b708462325bcdc99268e4145bc6a0afe886badd9cf18f"},
+ {file = "scout_apm-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28148b6e61468dc6df0677516b06844d2fe8faa2b044567efc6b96d7da218a36"},
+ {file = "scout_apm-3.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f45c61f75f83c21420327097c7780cd84c766b3a26ac1e83d4ec95dfa672ed9c"},
+ {file = "scout_apm-3.2.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5582f9996ab71346513ad36a6fd19bf8ccdae44d503120dc68d86802dfb524b"},
+ {file = "scout_apm-3.2.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:07926e68e06fd9418a755c9ca78eb127ce677d85b5243b40d177af218c31c3bc"},
+ {file = "scout_apm-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d67a8821ab6ab659dd6b0afa4d6345d61fe04b672d11c1150b5f2f1cd857c68b"},
+ {file = "scout_apm-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7808c08c24d2e2be46835e547465574dfa5a0abfff164eef1dd581fe71168e67"},
+ {file = "scout_apm-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93c09eb5052d14ebb0ea7f4541c22cbb21bf814fcf20263f21bed870748bd6cb"},
+ {file = "scout_apm-3.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:38ccb0a498e1d9d7579fb83965f60ae5837962cea0fa99a43cbedca1095df3f3"},
+ {file = "scout_apm-3.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7a3dae1c79561eace8903f0650c1ffe56763db3352edf7f262c5e2c6dc7a190e"},
+ {file = "scout_apm-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf1a5ba684ac2613759bd706b847df72645299fc3e365a6e12675d6f1ffe7396"},
+ {file = "scout_apm-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caf874e599ab7ddd09d6df6cdec16fed741159e170afd679b2379b5dbf194907"},
+ {file = "scout_apm-3.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9601011de7c2389934d9b535cab5e431e0428824624a391aa43a6cc803ec37b"},
+ {file = "scout_apm-3.2.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95e428292d6158a0360d08be8215e0f7d6630b9b2429110ea839cf828de3e58e"},
+ {file = "scout_apm-3.2.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a35e88d190bdc501b217fb271c42ae3637184c8835b386f6751c34f00b579bf"},
+ {file = "scout_apm-3.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d54ff09267d80fe189ae99b6739af8cf0a0085c7263055e716bdb51b8e4f23ad"},
+ {file = "scout_apm-3.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:24d0f1f0ec684829a3be3b36e00ec9663e45bbcd38d9c6acda6f4b6413a24d3d"},
+ {file = "scout_apm-3.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3e733257b817e7fddd3d8f57e340d4765cf02c58a6a7fd8ad55823041ab34a42"},
+ {file = "scout_apm-3.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9b4f8c7b3bafd3a6650b5a52c6b5f30f554da20b1879e789eb929d37440fdcf"},
+ {file = "scout_apm-3.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2267be02c56738fe8056abfbf7ff4db0f18427b1124b3709c39c7d9ccfd3062"},
+ {file = "scout_apm-3.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b383eac6b96bae8732b7d8243ba1bf08d8bc04f7d5e4f0fe4dda92af4fed517f"},
+ {file = "scout_apm-3.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05780a6cfe6762a72493c9e3dad62311a96c4e37c883bb750d1c7405e985d1a"},
+ {file = "scout_apm-3.2.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce08a5d6bea569cf441f5dca87974f37e890307db10506aa8ad3c03d459e0f14"},
+ {file = "scout_apm-3.2.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7ac9eeda31f3ddd53fac0d5055e125055cc42d7618f971d37cd64eecb561ab"},
+ {file = "scout_apm-3.2.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9933edef14c729c902a932c94a3b84da97c4aede197c0e8447f66c9eb601c4b0"},
+ {file = "scout_apm-3.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2d7aeb8fe127490fe2f0c366a92a4fd6073113c8fb62e15ecb744804fc9d3b2f"},
+ {file = "scout_apm-3.2.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:2fb732f5ab0bfc200485f45a80d786efcc2881a2a4a7f03cde2df85bbe243464"},
+ {file = "scout_apm-3.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5beccd63927d6cbc67f894e86fe74ac10e96fbca7c925ec6e9db2c21f7f40673"},
+ {file = "scout_apm-3.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:59dabfb4aa4917a96e9199c1749e329115987b789026e52c8db6f64e968616f1"},
+ {file = "scout_apm-3.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6015ad17b43624d6023b50675e0ac5d1c323ed29f969dc2a1a38d9192c7b3732"},
+ {file = "scout_apm-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db70fcfbd805892675561e413a38d1d0dbaec86bb1be24cc9a80372dccfa1074"},
+ {file = "scout_apm-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fd6ee26378916548bbe422fe8c94bd1fbbcee0cb06ff64fd07855747cef6306"},
+ {file = "scout_apm-3.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72af70a094f6c7082b54d0a677a31cd1615af70dfb5e8246a1491c6ffc3770d4"},
+ {file = "scout_apm-3.2.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b8de2ebbdcc51039d977fefcfe9811efa680f65305a5dc3e0905ff27f7c2152"},
+ {file = "scout_apm-3.2.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c9452fa6ba1f4ed6111cc9e7d393842b023f747cd1977dab2fc55701ebb83c8"},
+ {file = "scout_apm-3.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8236bed9104b912b476d7769af354f14d4d5cdd0bf2f0ed029a74fc2896f0574"},
+ {file = "scout_apm-3.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aba37ce89dd9bf90b599f5a7ea7b12bad9386937d6fd5e41642b2b831377c469"},
+ {file = "scout_apm-3.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fbc8ee23f13b5d8a49567b5bb33d334b78f58b143b789c4608824a984adef08"},
+ {file = "scout_apm-3.2.1-py3-none-any.whl", hash = "sha256:b39016be3f84de6b92501dc735bd686129f4e2ed8c05eac4632e7b365d8c58d4"},
+ {file = "scout_apm-3.2.1.tar.gz", hash = "sha256:7f1da37cc1c0b3a5fceb279df635dd84a8b3537e3712df94749ff9ed1c52a28c"},
+]
+
+[package.dependencies]
+asgiref = "*"
+certifi = "*"
+psutil = ">=5,<6"
+urllib3 = "*"
+wrapt = ">=1.10,<2.0"
+
[[package]]
name = "selenium"
version = "4.27.1"
@@ -4890,4 +4968,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.0"
python-versions = "3.11.2"
-content-hash = "8a9a652566a2be701d2c9a4d27e0cc5af74494cbef750c0a5e37e3fa68cddf4b"
+content-hash = "e3375faeef609c410a754e069631f0f8f7bfde91dbd0925c974b7c9489755ba2"
From 315340d6c16f9f32c2052f4cf389c274c0586995 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 11:26:30 -0500
Subject: [PATCH 30/52] Add scout-apm dependency for application performance
monitoring
---
pyproject.toml | 1 +
1 file changed, 1 insertion(+)
diff --git a/pyproject.toml b/pyproject.toml
index 0ac44341e..87dceb481 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -81,6 +81,7 @@ channels = "^4.2.0"
channels-redis = "^4.2.1"
async-timeout = "^5.0.1"
aiohttp = "^3.11.11"
+scout-apm = "^3.2.1"
[tool.poetry.group.dev.dependencies]
black = "^24.8.0"
From 41de75a8795251dc13dbdc5fdd24e500b5559a97 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 11:28:09 -0500
Subject: [PATCH 31/52] Add Scout APM integration and improve code formatting
in settings.py
---
blt/settings.py | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/blt/settings.py b/blt/settings.py
index dbed369dc..38e62330c 100644
--- a/blt/settings.py
+++ b/blt/settings.py
@@ -51,6 +51,18 @@
SITE_ID = 1
+
+INSTALLED_APPS = [
+ "scout_apm.django", # should be listed first
+ # ... other apps ...
+]
+
+# Scout settings
+SCOUT_MONITOR = True
+SCOUT_KEY = os.environ.get("SCOUT_KEY")
+SCOUT_NAME = PROJECT_NAME
+
+
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
From e465c6c4f5dcf50ddcefe7cbe1fc30d08bae3838 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 12:11:43 -0500
Subject: [PATCH 32/52] Refactor settings.py for improved readability and
formatting
---
blt/settings.py | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/blt/settings.py b/blt/settings.py
index 38e62330c..7a7c1f986 100644
--- a/blt/settings.py
+++ b/blt/settings.py
@@ -51,12 +51,6 @@
SITE_ID = 1
-
-INSTALLED_APPS = [
- "scout_apm.django", # should be listed first
- # ... other apps ...
-]
-
# Scout settings
SCOUT_MONITOR = True
SCOUT_KEY = os.environ.get("SCOUT_KEY")
@@ -64,6 +58,7 @@
INSTALLED_APPS = (
+ "scout_apm.django",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
From d3cff4fa3ecac36b13071852b9cfc3364fade368 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 13:23:33 -0500
Subject: [PATCH 33/52] Add New Relic integration for application monitoring
and update Procfile
---
Procfile | 2 +-
poetry.lock | 43 ++++++++++++++++++++++++++++++++++++++++++-
pyproject.toml | 1 +
3 files changed, 44 insertions(+), 2 deletions(-)
diff --git a/Procfile b/Procfile
index 42fce1a42..ae6ff3a93 100644
--- a/Procfile
+++ b/Procfile
@@ -1,2 +1,2 @@
release: python manage.py migrate --noinput
-web: bin/start-pgbouncer uvicorn blt.asgi:application --host 0.0.0.0 --port ${PORT}
+web: newrelic-admin run-program bin/start-pgbouncer uvicorn blt.asgi:application --host 0.0.0.0 --port ${PORT}
diff --git a/poetry.lock b/poetry.lock
index 416652472..414edc6b8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2662,6 +2662,47 @@ files = [
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
]
+[[package]]
+name = "newrelic"
+version = "10.4.0"
+description = "New Relic Python Agent"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "newrelic-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8336e0c569ad28ad33427c99bf4a5cb31ec5df8d95ac66c6065023f0b108a3a"},
+ {file = "newrelic-10.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e5c32c767b64369a8b816629688e62efc58f5ff81b5f4d97f7cf8f3f23d4deb"},
+ {file = "newrelic-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a6658cfe3924c646c1b78a70f60eb890842d8fc31f752e32a450022d397672b8"},
+ {file = "newrelic-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4ecf94d7c9103c304d6e27afdf701fd62fcd4320d9ab8d94bcaf4c77cf4576a9"},
+ {file = "newrelic-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40a3359672e99c64226b33599daa0d210128a1c7540e26e86400e5710741bf17"},
+ {file = "newrelic-10.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12572fc2f487dbdb6d2ebec6b1edbe3c3955af7638acf0bda6eff93072d45230"},
+ {file = "newrelic-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:844a38c3ddeb3d2e2d6e568ab8a7f38e12ceaefd77261c66d2c4c9a20c2596ae"},
+ {file = "newrelic-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64da7c1155066d115e63523edfd4b9f39361b4c19a46fdde4b710b00342fb31d"},
+ {file = "newrelic-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d2ea64631663155e10f20a793076f271bf072e3564ecf6bd723f94358cb41c"},
+ {file = "newrelic-10.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7da78dc374cc5a72bceaf6c03bc26fda6fd398b242a8aa1221e75cfa540b0"},
+ {file = "newrelic-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a4fdbdd3e6226b01baab71cc2d2406f9f83cd88ee12d72e48593a8f510e240ff"},
+ {file = "newrelic-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9df1bf1b1c0b294fce69bf0bbf87c03da544028f69ef6f1b597bf289a6b048e9"},
+ {file = "newrelic-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32fd7851d2c1c34632c4d6ae6bfe700703c99f1150ded0b16844257925d47861"},
+ {file = "newrelic-10.4.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a1b1331ff3950824a61dc356cc57acff2051e2f0ca00e30705ba5b10e8b9c9"},
+ {file = "newrelic-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8da411cbb14f675c99276e76c92557887f9f5e8044ab2e1e85d4f9826b4eca6b"},
+ {file = "newrelic-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c626fcde15412a905f2699f75d00d9c3af67e2b8f75f3ea1afdd6c0bb9a87d43"},
+ {file = "newrelic-10.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e50b0abfbfd2b6e84896341b6c6b60a1564a98fef64cc0c27b407dac95314e7"},
+ {file = "newrelic-10.4.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b2f4fe10fed13cae551c41c90cb1686beee4daea0eb03559f552a58557a272"},
+ {file = "newrelic-10.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d69864f83039ffa17e6dab1d986d6a74be2401895b1ea9cb30453788b9372113"},
+ {file = "newrelic-10.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:526ebed52279b7202f65b319574c0759620ab102331944211cadc71d5f205a8e"},
+ {file = "newrelic-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd36b7a4de9f7c49fde9849abc6fea293f70b09acad9a8573a514da5249b0f66"},
+ {file = "newrelic-10.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0297bfbef60ee57ce16e40192b3d317a8fb581f40a614a56672173db517117d"},
+ {file = "newrelic-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:de25b50eba6e4b6cc9d0ccd980405ff2cceb4df52738a765dc55586a81c69d3a"},
+ {file = "newrelic-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:12700b8560a29935ca3cb9868c6f977512d1d4d9f04a91ee16893c52273f0cc1"},
+ {file = "newrelic-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b94ca109cfa54ab2c8270d9814923ee3c0abb9e9afd7da026e0b4869f8d93969"},
+ {file = "newrelic-10.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2a1b7b8b5414061bff996d2cef140ab26e9aa8d5e4e2cdce3ce8118adbf2c3e"},
+ {file = "newrelic-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6517683ba5c1170de100b0d9314dc3f542b5108acc535700018ba2fef26d3123"},
+ {file = "newrelic-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8015ad51d9463c5c7c9977b99214013c276bf8bba3e3f97d5432308d93e37dd"},
+ {file = "newrelic-10.4.0.tar.gz", hash = "sha256:4115eb9e5c97ad14e8cbae23dd5275dc162beb2b140c58ac4ba786c020f51aff"},
+]
+
+[package.extras]
+infinite-tracing = ["grpcio", "protobuf"]
+
[[package]]
name = "nltk"
version = "3.9.1"
@@ -4968,4 +5009,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.0"
python-versions = "3.11.2"
-content-hash = "e3375faeef609c410a754e069631f0f8f7bfde91dbd0925c974b7c9489755ba2"
+content-hash = "d614aeb15753a479822bb32554445a54e6d3188ac1c97b064639ce2b74bed759"
diff --git a/pyproject.toml b/pyproject.toml
index 87dceb481..e8119dfe8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,6 +82,7 @@ channels-redis = "^4.2.1"
async-timeout = "^5.0.1"
aiohttp = "^3.11.11"
scout-apm = "^3.2.1"
+newrelic = "^10.4.0"
[tool.poetry.group.dev.dependencies]
black = "^24.8.0"
From bc04622efddddd08230ea7a319e8b0fe47598827 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 14:19:42 -0500
Subject: [PATCH 34/52] Refactor settings.py for improved readability and
formatting
---
blt/settings.py | 18 +++++++++++++-----
1 file changed, 13 insertions(+), 5 deletions(-)
diff --git a/blt/settings.py b/blt/settings.py
index 7a7c1f986..15c0566fc 100644
--- a/blt/settings.py
+++ b/blt/settings.py
@@ -409,16 +409,24 @@
}
}
else:
+ # temp to check memory usage
CACHES = {
"default": {
- "BACKEND": "django_redis.cache.RedisCache",
- "LOCATION": os.environ.get("REDISCLOUD_URL"),
- "OPTIONS": {
- "CLIENT_CLASS": "django_redis.client.DefaultClient",
- },
+ "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+ "LOCATION": "unique-snowflake",
}
}
+ # CACHES = {
+ # "default": {
+ # "BACKEND": "django_redis.cache.RedisCache",
+ # "LOCATION": os.environ.get("REDISCLOUD_URL"),
+ # "OPTIONS": {
+ # "CLIENT_CLASS": "django_redis.client.DefaultClient",
+ # },
+ # }
+ # }
+
if DEBUG or TESTING:
anon_throttle = 100000
user_throttle = 100000
From 96622f2a2e94038a5392c97a3098d62432077d71 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 16:14:31 -0500
Subject: [PATCH 35/52] Update settings.py
---
blt/settings.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/blt/settings.py b/blt/settings.py
index 15c0566fc..290d97fc7 100644
--- a/blt/settings.py
+++ b/blt/settings.py
@@ -58,7 +58,7 @@
INSTALLED_APPS = (
- "scout_apm.django",
+ #"scout_apm.django",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
From b36c568a96db54daa0ba788b7a94a392152c5a45 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 16:19:33 -0500
Subject: [PATCH 36/52] Update settings.py
---
blt/settings.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/blt/settings.py b/blt/settings.py
index 290d97fc7..ae9f8bf1e 100644
--- a/blt/settings.py
+++ b/blt/settings.py
@@ -58,7 +58,7 @@
INSTALLED_APPS = (
- #"scout_apm.django",
+ # "scout_apm.django",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
From 4834d03f4b0e5fccd1a78e69aa567269922fa1ad Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:09:25 -0500
Subject: [PATCH 37/52] Refactor check_status in website/views/core.py for
 enhanced organization and clarity
---
website/views/core.py | 249 ++++++++++++++++--------------------------
1 file changed, 93 insertions(+), 156 deletions(-)
diff --git a/website/views/core.py b/website/views/core.py
index d32834ade..16aa7644c 100644
--- a/website/views/core.py
+++ b/website/views/core.py
@@ -104,18 +104,23 @@ def memory_usage_by_module(limit=1000):
def check_status(request):
"""
- Example status check that includes:
- - External service checks (Bitcoin, SendGrid, GitHub, OpenAI, etc.)
- - Top 5 processes by RSS memory usage
- - Database connection count
- - Redis info
- - Memory usage by module (via tracemalloc)
- This result is cached for 60 seconds to avoid expensive repeated checks.
+ Status check function with configurable components.
+ Enable/disable specific checks using the CONFIG constants.
"""
+ # Configuration flags
+ CHECK_BITCOIN = False
+ CHECK_SENDGRID = False
+ CHECK_GITHUB = False
+ CHECK_OPENAI = False
+ CHECK_MEMORY = False
+ CHECK_DATABASE = False
+ CHECK_REDIS = False
+ CACHE_TIMEOUT = 60 # Cache timeout in seconds
+
status_data = cache.get("service_status")
if not status_data:
- print("Starting memory profiling...")
+ print("Starting status checks...")
status_data = {
"bitcoin": False,
@@ -127,7 +132,7 @@ def check_status(request):
"redis_stats": {},
}
- if settings.DEBUG:
+ if CHECK_MEMORY and settings.DEBUG:
status_data.update(
{
"memory_info": psutil.virtual_memory()._asdict(),
@@ -137,131 +142,83 @@ def check_status(request):
}
)
- # -------------------------------------------------------
# Bitcoin RPC check
- # -------------------------------------------------------
- bitcoin_rpc_user = os.getenv("BITCOIN_RPC_USER")
- bitcoin_rpc_password = os.getenv("BITCOIN_RPC_PASSWORD")
- bitcoin_rpc_host = os.getenv("BITCOIN_RPC_HOST", "127.0.0.1")
- bitcoin_rpc_port = os.getenv("BITCOIN_RPC_PORT", "8332")
+ if CHECK_BITCOIN:
+ bitcoin_rpc_user = os.getenv("BITCOIN_RPC_USER")
+ bitcoin_rpc_password = os.getenv("BITCOIN_RPC_PASSWORD")
+ bitcoin_rpc_host = os.getenv("BITCOIN_RPC_HOST", "127.0.0.1")
+ bitcoin_rpc_port = os.getenv("BITCOIN_RPC_PORT", "8332")
- try:
- print("Checking Bitcoin RPC...")
- response = requests.post(
- f"http://{bitcoin_rpc_host}:{bitcoin_rpc_port}",
- json={
- "jsonrpc": "1.0",
- "id": "curltest",
- "method": "getblockchaininfo",
- "params": [],
- },
- auth=(bitcoin_rpc_user, bitcoin_rpc_password),
- timeout=5,
- )
- if response.status_code == 200:
- status_data["bitcoin"] = True
- status_data["bitcoin_block"] = response.json().get("result", {}).get("blocks")
- print("Bitcoin RPC check successful.")
- else:
- status_data["bitcoin"] = False
- print("Bitcoin RPC check failed.")
-
- except requests.exceptions.RequestException as e:
- status_data["bitcoin"] = False
- print(f"Bitcoin RPC Error: {e}")
-
- # -------------------------------------------------------
- # SendGrid API check
- # -------------------------------------------------------
- sendgrid_api_key = os.getenv("SENDGRID_API_KEY")
- if sendgrid_api_key:
try:
- print("Checking SendGrid API...")
- response = requests.get(
- "https://api.sendgrid.com/v3/user/account",
- headers={"Authorization": f"Bearer {sendgrid_api_key}"},
+ print("Checking Bitcoin RPC...")
+ response = requests.post(
+ f"http://{bitcoin_rpc_host}:{bitcoin_rpc_port}",
+ json={
+ "jsonrpc": "1.0",
+ "id": "curltest",
+ "method": "getblockchaininfo",
+ "params": [],
+ },
+ auth=(bitcoin_rpc_user, bitcoin_rpc_password),
timeout=5,
)
-
if response.status_code == 200:
- status_data["sendgrid"] = True
- print("SendGrid API check successful.")
- else:
- status_data["sendgrid"] = False
- print(
- f"SendGrid API token check failed with status code {response.status_code}: "
- f"{response.json().get('message', 'No message provided')}"
- )
-
+ status_data["bitcoin"] = True
+ status_data["bitcoin_block"] = response.json().get("result", {}).get("blocks")
except requests.exceptions.RequestException as e:
- status_data["sendgrid"] = False
- print(f"SendGrid API Error: {e}")
+ print(f"Bitcoin RPC Error: {e}")
- # -------------------------------------------------------
- # GitHub API check
- # -------------------------------------------------------
- github_token = os.getenv("GITHUB_TOKEN")
- if github_token:
- try:
- print("Checking GitHub API...")
- headers = {"Authorization": f"token {github_token}"}
+ # SendGrid API check
+ if CHECK_SENDGRID:
+ sendgrid_api_key = os.getenv("SENDGRID_API_KEY")
+ if sendgrid_api_key:
try:
+ print("Checking SendGrid API...")
response = requests.get(
- "https://api.github.com/user/repos", headers=headers, timeout=2
+ "https://api.sendgrid.com/v3/user/account",
+ headers={"Authorization": f"Bearer {sendgrid_api_key}"},
+ timeout=5,
)
- except requests.exceptions.Timeout:
- status_data["github"] = False
- print("GitHub API request timed out")
-
+ status_data["sendgrid"] = response.status_code == 200
except requests.exceptions.RequestException as e:
- status_data["github"] = False
- print(f"GitHub API Error: {e}")
+ print(f"SendGrid API Error: {e}")
- if response.status_code == 200:
- status_data["github"] = True
- print("GitHub API check successful.")
- else:
- status_data["github"] = False
- print(
- f"GitHub API token check failed with status code {response.status_code}: "
- f"{response.json().get('message', 'No message provided')}"
+ # GitHub API check
+ if CHECK_GITHUB:
+ github_token = os.getenv("GITHUB_TOKEN")
+ if github_token:
+ try:
+ print("Checking GitHub API...")
+ response = requests.get(
+ "https://api.github.com/user/repos",
+ headers={"Authorization": f"token {github_token}"},
+ timeout=5,
)
+ status_data["github"] = response.status_code == 200
+ except requests.exceptions.RequestException as e:
+ print(f"GitHub API Error: {e}")
- except requests.exceptions.RequestException as e:
- status_data["github"] = False
- print(f"GitHub API Error: {e}")
-
- # -------------------------------------------------------
# OpenAI API check
- # -------------------------------------------------------
- openai_api_key = os.getenv("OPENAI_API_KEY")
- if openai_api_key:
- try:
- print("Checking OpenAI API...")
- headers = {"Authorization": f"Bearer {openai_api_key}"}
- response = requests.get(
- "https://api.openai.com/v1/models", headers=headers, timeout=5
- )
-
- if response.status_code == 200:
- status_data["openai"] = True
- print("OpenAI API check successful.")
- else:
- status_data["openai"] = False
- print(
- f"OpenAI API token check failed with status code {response.status_code}: "
- f"{response.json().get('message', 'No message provided')}"
+ if CHECK_OPENAI:
+ openai_api_key = os.getenv("OPENAI_API_KEY")
+ if openai_api_key:
+ try:
+ print("Checking OpenAI API...")
+ response = requests.get(
+ "https://api.openai.com/v1/models",
+ headers={"Authorization": f"Bearer {openai_api_key}"},
+ timeout=5,
)
+ status_data["openai"] = response.status_code == 200
+ except requests.exceptions.RequestException as e:
+ print(f"OpenAI API Error: {e}")
- except requests.exceptions.RequestException as e:
- status_data["openai"] = False
- print(f"OpenAI API Error: {e}")
-
- if settings.DEBUG:
- # -------------------------------------------------------
- # Top memory consumers (process-level) via psutil
- # -------------------------------------------------------
- print("Getting top memory consumers...")
+ # Memory usage checks
+ if CHECK_MEMORY and settings.DEBUG:
+ print("Getting memory usage information...")
+ tracemalloc.start()
+
+ # Get top memory consumers
for proc in psutil.process_iter(["pid", "name", "memory_info"]):
try:
proc_info = proc.info
@@ -280,58 +237,38 @@ def check_status(request):
reverse=True,
)[:5]
- # -------------------------------------------------------
- # Memory usage by module (via tracemalloc)
- # -------------------------------------------------------
- print("Calculating memory usage by module...")
- top_modules = memory_usage_by_module(limit=1000)
- status_data["memory_by_module"] = top_modules
-
- # -------------------------------------------------------
- # Memory profiling info (current, peak) - optional
- # -------------------------------------------------------
- # If you want an overall snapshot: start tracemalloc before
- # the function call, or simply do an extra measure here.
- # For example:
- tracemalloc.start()
+ # Memory profiling info
current, peak = tracemalloc.get_traced_memory()
status_data["memory_profiling"]["current"] = current
status_data["memory_profiling"]["peak"] = peak
tracemalloc.stop()
- # -------------------------------------------------------
- # Database connection count
- # -------------------------------------------------------
- print("Getting database connection count...")
- if settings.DATABASES.get("default", {}).get("ENGINE") == "django.db.backends.postgresql":
- with connection.cursor() as cursor:
- cursor.execute("SELECT COUNT(*) FROM pg_stat_activity WHERE state = 'active'")
- status_data["db_connection_count"] = cursor.fetchone()[0]
+ # Database connection check
+ if CHECK_DATABASE:
+ print("Getting database connection count...")
+ if (
+ settings.DATABASES.get("default", {}).get("ENGINE")
+ == "django.db.backends.postgresql"
+ ):
+ with connection.cursor() as cursor:
+ cursor.execute("SELECT COUNT(*) FROM pg_stat_activity WHERE state = 'active'")
+ status_data["db_connection_count"] = cursor.fetchone()[0]
- # -------------------------------------------------------
# Redis stats
- # -------------------------------------------------------
- print("Getting Redis stats...")
- try:
- redis_client = get_redis_connection("default")
- status_data["redis_stats"] = redis_client.info()
- except Exception as e:
- print(f"Redis error or not supported: {e}")
+ if CHECK_REDIS:
+ print("Getting Redis stats...")
+ try:
+ redis_client = get_redis_connection("default")
+ status_data["redis_stats"] = redis_client.info()
+ except Exception as e:
+ print(f"Redis error or not supported: {e}")
- # -------------------------------------------------------
- # Cache the status data for 60s to avoid repeated overhead
- # -------------------------------------------------------
- print("Caching service status...")
- cache.set("service_status", status_data, timeout=60)
+ # Cache the results
+ cache.set("service_status", status_data, timeout=CACHE_TIMEOUT)
return render(request, "status_page.html", {"status": status_data})
-# ----------------------------------------------------------------------------------
-# 3) The rest of your existing views remain the same
-# ----------------------------------------------------------------------------------
-
-
def github_callback(request):
ALLOWED_HOSTS = ["github.com"]
params = urllib.parse.urlencode(request.GET)
From 75410c7f572cf6b4d67c98c3e3c9b88ed0a1e147 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:13:13 -0500
Subject: [PATCH 38/52] memory test
---
Procfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Procfile b/Procfile
index ae6ff3a93..32aafed5c 100644
--- a/Procfile
+++ b/Procfile
@@ -1,2 +1,2 @@
-release: python manage.py migrate --noinput
+release: python manage.py migrate --noinput && rm -rf static staticfiles
web: newrelic-admin run-program bin/start-pgbouncer uvicorn blt.asgi:application --host 0.0.0.0 --port ${PORT}
From 060bd1464b6137cbae143f816be6057b612e0703 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:18:33 -0500
Subject: [PATCH 39/52] Update Procfile to replace web server with Gunicorn and
streamline release command
---
Procfile | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/Procfile b/Procfile
index 32aafed5c..7f3ed31e0 100644
--- a/Procfile
+++ b/Procfile
@@ -1,2 +1,3 @@
-release: python manage.py migrate --noinput && rm -rf static staticfiles
-web: newrelic-admin run-program bin/start-pgbouncer uvicorn blt.asgi:application --host 0.0.0.0 --port ${PORT}
+release: python manage.py migrate --noinput
+#web: newrelic-admin run-program bin/start-pgbouncer uvicorn blt.asgi:application --host 0.0.0.0 --port ${PORT}
+web: gunicorn blt.wsgi --log-file - --workers 2 --worker-class gthread --threads 2 --timeout 120
\ No newline at end of file
From 4bc967b53d777e025f734faa7c998a94d69b92ec Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:26:15 -0500
Subject: [PATCH 40/52] Implement code changes to enhance functionality and
improve performance
---
website/views/issue.py | 59 ++++++++++++++++++++++++++----------------
1 file changed, 37 insertions(+), 22 deletions(-)
diff --git a/website/views/issue.py b/website/views/issue.py
index 3f8474b0b..ccd00acd9 100644
--- a/website/views/issue.py
+++ b/website/views/issue.py
@@ -158,7 +158,10 @@ def create_github_issue(request, id):
return JsonResponse({"status": "Failed", "status_reason": "GitHub Access Token is missing"})
if issue.github_url:
return JsonResponse(
- {"status": "Failed", "status_reason": "GitHub Issue Exists at " + issue.github_url}
+ {
+ "status": "Failed",
+ "status_reason": "GitHub Issue Exists at " + issue.github_url,
+ }
)
if issue.domain.github:
screenshot_text = ""
@@ -194,7 +197,10 @@ def create_github_issue(request, id):
return JsonResponse({"status": "ok", "github_url": issue.github_url})
else:
return JsonResponse(
- {"status": "Failed", "status_reason": f"Issue with Github: {response.reason}"}
+ {
+ "status": "Failed",
+ "status_reason": f"Issue with Github: {response.reason}",
+ }
)
except Exception as e:
send_mail(
@@ -207,7 +213,10 @@ def create_github_issue(request, id):
return JsonResponse({"status": "Failed", "status_reason": f"Failed: error is {e}"})
else:
return JsonResponse(
- {"status": "Failed", "status_reason": "No Github URL for this domain, please add it."}
+ {
+ "status": "Failed",
+ "status_reason": "No Github URL for this domain, please add it.",
+ }
)
@@ -329,7 +338,8 @@ def newhome(request, template="new_home.html"):
current_time = now()
leaderboard = (
User.objects.filter(
- points__created__month=current_time.month, points__created__year=current_time.year
+ points__created__month=current_time.month,
+ points__created__year=current_time.year,
)
.annotate(total_points=Sum("points__score"))
.order_by("-total_points")
@@ -576,10 +586,10 @@ def submit_pr(request):
class IssueBaseCreate(object):
def form_valid(self, form):
- print(
- "processing form_valid IssueBaseCreate for ip address: ",
- get_client_ip(self.request),
- )
+ # print(
+ # "processing form_valid IssueBaseCreate for ip address: ",
+ # get_client_ip(self.request),
+ # )
score = 3
obj = form.save(commit=False)
obj.user = self.request.user
@@ -610,7 +620,7 @@ def form_valid(self, form):
p = Points.objects.create(user=self.request.user, issue=obj, score=score)
def process_issue(self, user, obj, created, domain, tokenauth=False, score=3):
- print("processing process_issue for ip address: ", get_client_ip(self.request))
+ # print("processing process_issue for ip address: ", get_client_ip(self.request))
p = Points.objects.create(user=user, issue=obj, score=score, reason="Issue reported")
messages.success(self.request, "Bug added ! +" + str(score))
try:
@@ -750,7 +760,7 @@ class IssueCreate(IssueBaseCreate, CreateView):
template_name = "report.html"
def get_initial(self):
- print("processing post for ip address: ", get_client_ip(self.request))
+ # print("processing post for ip address: ", get_client_ip(self.request))
try:
json_data = json.loads(self.request.body)
if not self.request.GET._mutable:
@@ -806,7 +816,7 @@ def get_initial(self):
return initial
def post(self, request, *args, **kwargs):
- print("processing post for ip address: ", get_client_ip(request))
+ # print("processing post for ip address: ", get_client_ip(request))
url = request.POST.get("url").replace("www.", "").replace("https://", "")
request.POST._mutable = True
@@ -871,10 +881,10 @@ def post(self, request, *args, **kwargs):
return super().post(request, *args, **kwargs)
def form_valid(self, form):
- print(
- "processing form_valid in IssueCreate for ip address: ",
- get_client_ip(self.request),
- )
+ # print(
+ # "processing form_valid in IssueCreate for ip address: ",
+ # get_client_ip(self.request),
+ # )
reporter_ip = get_client_ip(self.request)
form.instance.reporter_ip_address = reporter_ip
@@ -971,7 +981,10 @@ def create_issue(self, form):
if not domain_exists and (self.request.user.is_authenticated or tokenauth):
Points.objects.create(
- user=self.request.user, domain=domain, score=1, reason="Domain added"
+ user=self.request.user,
+ domain=domain,
+ score=1,
+ reason="Domain added",
)
messages.success(self.request, "Domain added! + 1")
@@ -1119,7 +1132,9 @@ def get_context_data(self, **kwargs):
self.request.POST = {}
self.request.GET = {}
- print("processing get_context_data for ip address: ", get_client_ip(self.request))
+ # print(
+ # "processing get_context_data for ip address: ", get_client_ip(self.request)
+ # )
context = super(IssueCreate, self).get_context_data(**kwargs)
context["activities"] = Issue.objects.exclude(
Q(is_hidden=True) & ~Q(user_id=self.request.user.id)
@@ -1267,8 +1282,8 @@ class IssueView(DetailView):
template_name = "issue.html"
def get(self, request, *args, **kwargs):
- print("getting issue id: ", self.kwargs["slug"])
- print("getting issue id: ", self.kwargs)
+ # print("getting issue id: ", self.kwargs["slug"])
+ # print("getting issue id: ", self.kwargs)
ipdetails = IP()
try:
id = int(self.kwargs["slug"])
@@ -1283,8 +1298,8 @@ def get(self, request, *args, **kwargs):
ipdetails.agent = request.META["HTTP_USER_AGENT"]
ipdetails.referer = request.META.get("HTTP_REFERER", None)
- print("IP Address: ", ipdetails.address)
- print("Issue Number: ", ipdetails.issuenumber)
+ # print("IP Address: ", ipdetails.address)
+ # print("Issue Number: ", ipdetails.issuenumber)
try:
if self.request.user.is_authenticated:
@@ -1316,7 +1331,7 @@ def get(self, request, *args, **kwargs):
return super(IssueView, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
- print("getting context data")
+ # print("getting context data")
context = super(IssueView, self).get_context_data(**kwargs)
if self.object.user_agent:
user_agent = parse(self.object.user_agent)
From 747235f68a1b2571af9c4f4331ae3ae30f465670 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:28:25 -0500
Subject: [PATCH 41/52] Refactor code for improved readability and enable
memory check in status
---
website/views/core.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/website/views/core.py b/website/views/core.py
index 16aa7644c..87d21dc0e 100644
--- a/website/views/core.py
+++ b/website/views/core.py
@@ -112,7 +112,7 @@ def check_status(request):
CHECK_SENDGRID = False
CHECK_GITHUB = False
CHECK_OPENAI = False
- CHECK_MEMORY = False
+ CHECK_MEMORY = True
CHECK_DATABASE = False
CHECK_REDIS = False
CACHE_TIMEOUT = 60 # Cache timeout in seconds
From 496a05e6e117ecfc0a1dae415500a443fbbdff33 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:38:50 -0500
Subject: [PATCH 42/52] Enhance status indicators and improve code formatting
for clarity
---
website/templates/status_page.html | 20 ++++++++++++++------
website/views/core.py | 16 +++++++---------
2 files changed, 21 insertions(+), 15 deletions(-)
diff --git a/website/templates/status_page.html b/website/templates/status_page.html
index 41646aaae..7b258c760 100644
--- a/website/templates/status_page.html
+++ b/website/templates/status_page.html
@@ -69,8 +69,11 @@
Bitcoin Node
-
- {% if status.bitcoin %}
+
+
+ {% if status.bitcoin == None %}
+
Status Unknown
+ {% elif status.bitcoin %}
Operational
Block Height: {{ status.bitcoin_block }}
Bitcoin Node
SendGrid API
-
- {% if status.sendgrid %}
+
+
+ {% if status.sendgrid == None %}
+
Status Unknown
+ {% elif status.sendgrid %}
Operational
{% else %}
Not Operational
@@ -97,8 +103,10 @@
SendGrid API
GitHub API
-
- {% if status.github %}
+
+ {% if status.github == None %}
+
Status Unknown
+ {% elif status.github %}
Operational
{% else %}
Not Operational
diff --git a/website/views/core.py b/website/views/core.py
index 87d21dc0e..5361794aa 100644
--- a/website/views/core.py
+++ b/website/views/core.py
@@ -115,21 +115,19 @@ def check_status(request):
CHECK_MEMORY = True
CHECK_DATABASE = False
CHECK_REDIS = False
- CACHE_TIMEOUT = 60 # Cache timeout in seconds
+ CACHE_TIMEOUT = 60
status_data = cache.get("service_status")
if not status_data:
- print("Starting status checks...")
-
status_data = {
- "bitcoin": False,
+ "bitcoin": None if not CHECK_BITCOIN else False,
"bitcoin_block": None,
- "sendgrid": False,
- "github": False,
- "openai": False,
- "db_connection_count": 0,
- "redis_stats": {},
+ "sendgrid": None if not CHECK_SENDGRID else False,
+ "github": None if not CHECK_GITHUB else False,
+ "openai": None if not CHECK_OPENAI else False,
+ "db_connection_count": None if not CHECK_DATABASE else 0,
+ "redis_stats": {} if not CHECK_REDIS else {},
}
if CHECK_MEMORY and settings.DEBUG:
From f85f02d025bf516d431d7ea06ed9ce4beb760c07 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:45:02 -0500
Subject: [PATCH 43/52] comment out social logins
---
website/templates/account/login.html | 26 ++++++++------------------
1 file changed, 8 insertions(+), 18 deletions(-)
diff --git a/website/templates/account/login.html b/website/templates/account/login.html
index 3886db164..d1cca3596 100644
--- a/website/templates/account/login.html
+++ b/website/templates/account/login.html
@@ -106,50 +106,40 @@
{% trans "Account Login" %
{% endfor %}
{% endif %}
-
+
From f0b69ab34b5aa26f2acf5569e25e0a376751ff90 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:46:53 -0500
Subject: [PATCH 44/52] Update OpenAI API status display to handle unknown
status and improve visual indicators
---
website/templates/status_page.html | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/website/templates/status_page.html b/website/templates/status_page.html
index 7b258c760..90b962f0c 100644
--- a/website/templates/status_page.html
+++ b/website/templates/status_page.html
@@ -185,8 +185,10 @@
Redis Stats
OpenAI API
-
- {% if status.openai %}
+
+ {% if status.openai == None %}
+
Status Unknown
+ {% elif status.openai %}
Operational
{% else %}
Not Operational
From 81caec3ca7cc5169ddc2a52b6850aeb4f1173ebe Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:50:02 -0500
Subject: [PATCH 45/52] Refactor code for improved readability and consistency
in formatting
---
website/views/core.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/website/views/core.py b/website/views/core.py
index 5361794aa..e6115add8 100644
--- a/website/views/core.py
+++ b/website/views/core.py
@@ -212,7 +212,7 @@ def check_status(request):
print(f"OpenAI API Error: {e}")
# Memory usage checks
- if CHECK_MEMORY and settings.DEBUG:
+ if CHECK_MEMORY:
print("Getting memory usage information...")
tracemalloc.start()
@@ -274,6 +274,9 @@ def github_callback(request):
return safe_redirect_allowed(url, ALLOWED_HOSTS)
+s
+
+
def google_callback(request):
ALLOWED_HOSTS = ["accounts.google.com"]
params = urllib.parse.urlencode(request.GET)
From d01fb46d87a545e97bba070c0f4e547aff3bccea Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:53:01 -0500
Subject: [PATCH 46/52] Add memory usage print statements to CI/CD workflow and
clean up code
---
.github/workflows/ci-cd.yml | 15 ++++++++++++++-
website/views/core.py | 3 ---
2 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index a4183fa4a..7d2be7f11 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -53,6 +53,9 @@ jobs:
restore-keys: |
${{ runner.os }}-poetry-
+ - name: Print memory usage
+ run: free -h
+
pre-commit:
name: Run pre-commit
needs: setup
@@ -69,6 +72,8 @@ jobs:
python-version: 3.11.2
- name: Run pre-commit
uses: pre-commit/action@v3.0.1
+ - name: Print memory usage
+ run: free -h
code-ql:
name: Run CodeQL
@@ -132,6 +137,9 @@ jobs:
- uses: github/codeql-action/analyze@v2
if: ${{ steps.set-matrix.outputs.languages != '' }}
+ - name: Print memory usage
+ run: free -h
+
test:
name: Run Tests
needs: code-ql
@@ -152,6 +160,8 @@ jobs:
- run: poetry run python manage.py collectstatic --noinput
- name: Run tests
run: poetry run xvfb-run --auto-servernum python manage.py test -v 3 --failfast
+ - name: Print memory usage
+ run: free -h
docker-test:
runs-on: ubuntu-latest
@@ -182,4 +192,7 @@ jobs:
- name: Clean up
run: |
docker stop my-container
- docker rm my-container
\ No newline at end of file
+ docker rm my-container
+
+ - name: Print memory usage
+ run: free -h
\ No newline at end of file
diff --git a/website/views/core.py b/website/views/core.py
index e6115add8..40b93e755 100644
--- a/website/views/core.py
+++ b/website/views/core.py
@@ -274,9 +274,6 @@ def github_callback(request):
return safe_redirect_allowed(url, ALLOWED_HOSTS)
-s
-
-
def google_callback(request):
ALLOWED_HOSTS = ["accounts.google.com"]
params = urllib.parse.urlencode(request.GET)
From abf25852e1645a6c5a34658141e8bc7305db060f Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 19:58:22 -0500
Subject: [PATCH 47/52] fix
---
website/views/core.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/website/views/core.py b/website/views/core.py
index 40b93e755..02f8db300 100644
--- a/website/views/core.py
+++ b/website/views/core.py
@@ -130,7 +130,7 @@ def check_status(request):
"redis_stats": {} if not CHECK_REDIS else {},
}
- if CHECK_MEMORY and settings.DEBUG:
+ if CHECK_MEMORY:
status_data.update(
{
"memory_info": psutil.virtual_memory()._asdict(),
From 645fd7332028c0120248b56aacefba8a3c2fe0d1 Mon Sep 17 00:00:00 2001
From: DonnieBLT <128622481+DonnieBLT@users.noreply.github.com>
Date: Thu, 2 Jan 2025 20:03:36 -0500
Subject: [PATCH 48/52] Reduce Gunicorn worker count from 2 to 1 for optimized
resource usage
---
Procfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Procfile b/Procfile
index 7f3ed31e0..0945ffcee 100644
--- a/Procfile
+++ b/Procfile
@@ -1,3 +1,3 @@
release: python manage.py migrate --noinput
#web: newrelic-admin run-program bin/start-pgbouncer uvicorn blt.asgi:application --host 0.0.0.0 --port ${PORT}
-web: gunicorn blt.wsgi --log-file - --workers 2 --worker-class gthread --threads 2 --timeout 120
\ No newline at end of file
+web: gunicorn blt.wsgi --log-file - --workers 1 --worker-class gthread --threads 2 --timeout 120
\ No newline at end of file
From 4e7e5e0bc2e2ddddb9558702211cc21547f20504 Mon Sep 17 00:00:00 2001
From: Altafur Rahman
Date: Sun, 5 Jan 2025 23:22:50 +0600
Subject: [PATCH 49/52] Issue 2972 added refresh feature 2 (#3204)
* Add command to synchronize repository contributors and update technical details in RepoDetailView
* Add command to synchronize repository contributors and update technical details in RepoDetailView
* --no-update option officially removed from poetry
* poetry assumes we want to install local package BLT
* poetry assumes we want to install local package BLT
---
.github/workflows/ci-cd.yml | 6 +-
Dockerfile | 4 +-
.../commands/sync_repo_contributors.py | 82 ++++++++++++
website/templates/projects/repo_detail.html | 126 +++++++++++++-----
website/views/project.py | 121 +++++++++++++++++
5 files changed, 304 insertions(+), 35 deletions(-)
create mode 100644 website/management/commands/sync_repo_contributors.py
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index 7d2be7f11..87ea6f167 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -155,7 +155,7 @@ jobs:
with:
python-version: 3.11.2
- run: pip install poetry
- - run: poetry lock --no-update
+ - run: poetry lock
- run: poetry install
- run: poetry run python manage.py collectstatic --noinput
- name: Run tests
@@ -186,8 +186,8 @@ jobs:
docker run -d --name my-container my-app
- run: docker exec my-container pip install poetry
- - run: docker exec my-container poetry lock --no-update
- - run: docker exec my-container poetry install --no-dev --no-interaction
+ - run: docker exec my-container poetry lock
+ - run: docker exec my-container poetry install --without dev --no-interaction
- name: Clean up
run: |
diff --git a/Dockerfile b/Dockerfile
index 048b84014..80f86acfc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -33,8 +33,8 @@ RUN ln -s /usr/bin/google-chrome-stable /usr/local/bin/google-chrome
RUN pip install poetry
RUN poetry config virtualenvs.create false
COPY pyproject.toml poetry.lock* ./
-RUN poetry lock --no-update
-RUN poetry install
+RUN poetry lock
+RUN poetry install --no-root
# Install additional Python packages
RUN pip install opentelemetry-api opentelemetry-instrumentation
diff --git a/website/management/commands/sync_repo_contributors.py b/website/management/commands/sync_repo_contributors.py
new file mode 100644
index 000000000..096639d37
--- /dev/null
+++ b/website/management/commands/sync_repo_contributors.py
@@ -0,0 +1,82 @@
+import logging
+import time
+
+import requests
+from django.conf import settings
+from django.core.management.base import BaseCommand
+
+from website.models import Contributor, Repo
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+ help = "Synchronize all contributors for a repository"
+
+ def add_arguments(self, parser):
+ parser.add_argument("--repo_id", type=int, help="Repository ID to sync")
+
+ def handle(self, *args, **options):
+ repo_id = options.get("repo_id")
+ if not repo_id:
+ return
+
+ repo = Repo.objects.get(id=repo_id)
+ owner_repo = repo.repo_url.rstrip("/").split("github.com/")[-1]
+
+ headers = {
+ "Authorization": f"token {settings.GITHUB_TOKEN}",
+ "Accept": "application/vnd.github.v3+json",
+ }
+
+ # Get all contributors with pagination
+ page = 1
+ all_contributors = []
+
+ while True:
+ api_url = f"https://api.github.com/repos/{owner_repo}/contributors?anon=true&per_page=100&page={page}"
+ response = requests.get(api_url, headers=headers)
+
+ if response.status_code == 403:
+ reset_time = int(response.headers.get("X-RateLimit-Reset", 0))
+ wait_time = reset_time - int(time.time())
+ if wait_time > 0:
+ logger.info(f"Rate limit hit, waiting {wait_time} seconds")
+ time.sleep(wait_time)
+ continue
+
+ if response.status_code != 200:
+ break
+
+ contributors_page = response.json()
+ if not contributors_page:
+ break
+
+ all_contributors.extend(contributors_page)
+ page += 1
+
+ # Be nice to GitHub API
+ time.sleep(1)
+
+ # Batch create/update contributors
+ for contrib_data in all_contributors:
+ github_id = contrib_data.get("id")
+ if not github_id:
+ # skip if 'id' is missing
+ continue
+ contributor, created = Contributor.objects.update_or_create(
+ github_id=github_id,
+ defaults={
+ "name": contrib_data.get("login", "unknown"),
+ "github_url": contrib_data.get("html_url", ""),
+ "avatar_url": contrib_data.get("avatar_url", ""),
+ "contributions": contrib_data.get("contributions", 0),
+ "contributor_type": contrib_data.get("type", "User"),
+ },
+ )
+ repo.contributor.add(contributor)
+
+ repo.contributor_count = len(all_contributors)
+ repo.save()
+
+ logger.info(f"Synced {len(all_contributors)} contributors for {repo.name}")
diff --git a/website/templates/projects/repo_detail.html b/website/templates/projects/repo_detail.html
index 1a3b911c3..83ed4ab96 100644
--- a/website/templates/projects/repo_detail.html
+++ b/website/templates/projects/repo_detail.html
@@ -314,25 +314,28 @@
Community
-
+
Top Contributors
-
+
{{ repo.contributor_count|intcomma }} total contributors
-
+
{% for contributor in top_contributors %}
Technical Overview
-
+
@@ -425,21 +430,20 @@
Primary Language:
-
+
{{ repo.primary_language|default:"Not specified" }}
Repository Size:
-
- {{ repo.size|filesizeformat }}
-
+ {{ repo.size|filesizeformat }}
License:
-
- {{ repo.license|default:"Not specified" }}
-
+
{{ repo.license|default:"Not specified" }}
@@ -458,22 +462,23 @@
{% if repo.release_name %}
Version
-
- {{ repo.release_name }}
-
+ {{ repo.release_name }}
{% endif %}
{% if repo.release_datetime %}
Release Date
-
+
{{ repo.release_datetime|date:"M d, Y" }}
{% endif %}
Last Commit
-
+
{{ repo.last_commit_date|date:"M d, Y" }}
@@ -670,6 +675,67 @@ {
+ contributorsHtml += `
+
+
+
+
+ ${contributor.name}
+ ${contributor.verified ? '✓' : ''}
+
+
${contributor.contributions.toLocaleString()} commits
+
+
+
+
+
+ `;
+ });
+
+ contributorsContainer.innerHTML = contributorsHtml;
+
// Show success message
messageContainer.className = 'refresh-message text-sm text-green-600';
messageContainer.textContent = data.message;
diff --git a/website/views/project.py b/website/views/project.py
index 9b5682185..9b7384686 100644
--- a/website/views/project.py
+++ b/website/views/project.py
@@ -1155,4 +1155,125 @@ def get_issue_count(full_name, query, headers):
status=400,
)
+ elif section == "technical":
+ try:
+ github_token = getattr(settings, "GITHUB_TOKEN", None)
+ if not github_token:
+ return JsonResponse(
+ {"status": "error", "message": "GitHub token not configured"}, status=500
+ )
+
+ match = re.match(r"https://github.com/([^/]+)/([^/]+)/?", repo.repo_url)
+ if not match:
+ return JsonResponse(
+ {"status": "error", "message": "Invalid repository URL"}, status=400
+ )
+
+ owner, repo_name = match.groups()
+ api_url = f"https://api.github.com/repos/{owner}/{repo_name}"
+ headers = {
+ "Authorization": f"token {github_token}",
+ "Accept": "application/vnd.github.v3+json",
+ }
+
+ response = requests.get(api_url, headers=headers)
+ if response.status_code != 200:
+ return JsonResponse(
+ {"status": "error", "message": "Failed to fetch repository data"},
+ status=500,
+ )
+
+ repo_data = response.json()
+
+ # Update repository technical details
+ repo.primary_language = repo_data.get("language")
+ repo.size = repo_data.get("size", 0)
+ repo.license = repo_data.get("license", {}).get("name")
+
+ # Get latest release info
+ releases_url = f"{api_url}/releases/latest"
+ release_response = requests.get(releases_url, headers=headers)
+ if release_response.status_code == 200:
+ release_data = release_response.json()
+ repo.release_name = release_data.get("name") or release_data.get("tag_name")
+ repo.release_datetime = parse_datetime(release_data.get("published_at"))
+
+ repo.save()
+
+ return JsonResponse(
+ {
+ "status": "success",
+ "message": "Technical information updated successfully",
+ "data": {
+ "primary_language": repo.primary_language or "Not specified",
+ "size": repo.size,
+ "license": repo.license or "Not specified",
+ "release_name": repo.release_name or "Not available",
+ "release_date": repo.release_datetime.strftime("%b %d, %Y")
+ if repo.release_datetime
+ else "Not available",
+ "last_commit_date": repo.last_commit_date.strftime("%b %d, %Y")
+ if repo.last_commit_date
+ else "Not available",
+ },
+ }
+ )
+
+ except requests.RequestException as e:
+ return JsonResponse(
+ {
+ "status": "error",
+ "message": "Network error: A network error occurred. Please try again later.",
+ },
+ status=503,
+ )
+ except Exception as e:
+ return JsonResponse(
+ {"status": "error", "message": "An unexpected error occurred."}, status=500
+ )
+
+ elif section == "community":
+ try:
+ from django.core.management import call_command
+
+ repo = self.get_object()
+
+ # Run sync command
+ call_command("sync_repo_contributors", "--repo_id", repo.id)
+
+ # Refresh repo instance to get updated contributor_count
+ repo.refresh_from_db()
+
+ # Fetch real-time top contributors from GitHub
+ github_contributors = self.get_github_top_contributors(repo.repo_url)
+ merged_contributors = []
+ for gh_contrib in github_contributors:
+ merged_contributors.append(
+ {
+ "name": gh_contrib["login"],
+ "github_id": gh_contrib["id"],
+ "avatar_url": gh_contrib["avatar_url"],
+ "contributions": gh_contrib["contributions"],
+ "github_url": gh_contrib["html_url"],
+ "verified": False,
+ }
+ )
+
+ return JsonResponse(
+ {
+ "status": "success",
+ "message": "Fetched real-time contributor data from GitHub.",
+ "data": {
+ "contributors": merged_contributors,
+ "total_contributors": repo.contributor_count,
+ },
+ }
+ )
+
+ except ValueError as e:
+ return JsonResponse(
+ {"status": "error", "message": "There was an error processing your data."},
+ status=400,
+ )
+
return super().post(request, *args, **kwargs)
From d175668588cb37812050a0af94e14e41a39d3b20 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 6 Jan 2025 00:05:38 +0000
Subject: [PATCH 50/52] chore(deps): Bump unstructured from 0.16.11 to 0.16.12
Bumps [unstructured](https://github.com/Unstructured-IO/unstructured) from 0.16.11 to 0.16.12.
- [Release notes](https://github.com/Unstructured-IO/unstructured/releases)
- [Changelog](https://github.com/Unstructured-IO/unstructured/blob/main/CHANGELOG.md)
- [Commits](https://github.com/Unstructured-IO/unstructured/compare/0.16.11...0.16.12)
---
updated-dependencies:
- dependency-name: unstructured
dependency-type: direct:production
update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot]
---
poetry.lock | 26 ++++++++++++++++++--------
pyproject.toml | 2 +-
2 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 414edc6b8..09f686a25 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -2651,6 +2651,17 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
+[[package]]
+name = "ndjson"
+version = "0.3.1"
+description = "JsonDecoder for ndjson"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ndjson-0.3.1-py2.py3-none-any.whl", hash = "sha256:839c22275e6baa3040077b83c005ac24199b94973309a8a1809be962c753a410"},
+ {file = "ndjson-0.3.1.tar.gz", hash = "sha256:bf9746cb6bb1cb53d172cda7f154c07c786d665ff28341e4e689b796b229e5d6"},
+]
+
[[package]]
name = "nest-asyncio"
version = "1.6.0"
@@ -3227,6 +3238,7 @@ files = [
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
@@ -3817,9 +3829,6 @@ files = [
{file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"},
]
-[package.dependencies]
-async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""}
-
[package.extras]
hiredis = ["hiredis (>=3.0.0)"]
ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"]
@@ -4498,13 +4507,13 @@ files = [
[[package]]
name = "unstructured"
-version = "0.16.11"
+version = "0.16.12"
description = "A library that prepares raw documents for downstream ML tasks."
optional = false
python-versions = "<3.13,>=3.9.0"
files = [
- {file = "unstructured-0.16.11-py3-none-any.whl", hash = "sha256:a92d5bc2c2b7bb23369641fb7a7f0daba1775639199306ce4cd83ca564a03763"},
- {file = "unstructured-0.16.11.tar.gz", hash = "sha256:33ebf68aae11ce33c8a96335296557b5abd8ba96eaba3e5a1554c0b9eee40bb5"},
+ {file = "unstructured-0.16.12-py3-none-any.whl", hash = "sha256:bcac29ac1b38fba4228c5a1a7721d1aa7c48220f7c1dd43b563645c56e978c49"},
+ {file = "unstructured-0.16.12.tar.gz", hash = "sha256:c3133731c6edb9c2f474e62cb2b560cd0a8d578c4532ec14d8c0941e401770b0"},
]
[package.dependencies]
@@ -4517,6 +4526,7 @@ filetype = "*"
html5lib = "*"
langdetect = "*"
lxml = "*"
+ndjson = "*"
nltk = "*"
numpy = "<2"
psutil = "*"
@@ -5009,4 +5019,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.0"
python-versions = "3.11.2"
-content-hash = "d614aeb15753a479822bb32554445a54e6d3188ac1c97b064639ce2b74bed759"
+content-hash = "d66841992dbf0393a2c5160624eade4f0bc14a1ebd443be7b30cef25e0335f15"
diff --git a/pyproject.toml b/pyproject.toml
index e8119dfe8..b95f67bad 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -65,7 +65,7 @@ sentry-sdk = "^2.19.0"
bitcash = "^1.0.2"
pydantic = "^2.7.3"
pydantic_core = "^2.18.4"
-unstructured = "^0.16.8"
+unstructured = "^0.16.12"
Markdown = "^3.6"
faiss-cpu = "^1.8.0"
psutil = "^5.9.8"
From edfeb6944d7f6aed2832a0ad37acc0d02581fe46 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 6 Jan 2025 00:14:02 +0000
Subject: [PATCH 51/52] chore(deps-dev): Bump ruff from 0.8.4 to 0.8.6
Bumps [ruff](https://github.com/astral-sh/ruff) from 0.8.4 to 0.8.6.
- [Release notes](https://github.com/astral-sh/ruff/releases)
- [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/ruff/compare/0.8.4...0.8.6)
---
updated-dependencies:
- dependency-name: ruff
dependency-type: direct:development
update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot]
---
poetry.lock | 40 ++++++++++++++++++++--------------------
pyproject.toml | 2 +-
2 files changed, 21 insertions(+), 21 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 09f686a25..716435368 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -4005,29 +4005,29 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
-version = "0.8.4"
+version = "0.8.6"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.8.4-py3-none-linux_armv6l.whl", hash = "sha256:58072f0c06080276804c6a4e21a9045a706584a958e644353603d36ca1eb8a60"},
- {file = "ruff-0.8.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ffb60904651c00a1e0b8df594591770018a0f04587f7deeb3838344fe3adabac"},
- {file = "ruff-0.8.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ddf5d654ac0d44389f6bf05cee4caeefc3132a64b58ea46738111d687352296"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e248b1f0fa2749edd3350a2a342b67b43a2627434c059a063418e3d375cfe643"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf197b98ed86e417412ee3b6c893f44c8864f816451441483253d5ff22c0e81e"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c41319b85faa3aadd4d30cb1cffdd9ac6b89704ff79f7664b853785b48eccdf3"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9f8402b7c4f96463f135e936d9ab77b65711fcd5d72e5d67597b543bbb43cf3f"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e56b3baa9c23d324ead112a4fdf20db9a3f8f29eeabff1355114dd96014604"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:736272574e97157f7edbbb43b1d046125fce9e7d8d583d5d65d0c9bf2c15addf"},
- {file = "ruff-0.8.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fe710ab6061592521f902fca7ebcb9fabd27bc7c57c764298b1c1f15fff720"},
- {file = "ruff-0.8.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:13e9ec6d6b55f6da412d59953d65d66e760d583dd3c1c72bf1f26435b5bfdbae"},
- {file = "ruff-0.8.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:97d9aefef725348ad77d6db98b726cfdb075a40b936c7984088804dfd38268a7"},
- {file = "ruff-0.8.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ab78e33325a6f5374e04c2ab924a3367d69a0da36f8c9cb6b894a62017506111"},
- {file = "ruff-0.8.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8ef06f66f4a05c3ddbc9121a8b0cecccd92c5bf3dd43b5472ffe40b8ca10f0f8"},
- {file = "ruff-0.8.4-py3-none-win32.whl", hash = "sha256:552fb6d861320958ca5e15f28b20a3d071aa83b93caee33a87b471f99a6c0835"},
- {file = "ruff-0.8.4-py3-none-win_amd64.whl", hash = "sha256:f21a1143776f8656d7f364bd264a9d60f01b7f52243fbe90e7670c0dfe0cf65d"},
- {file = "ruff-0.8.4-py3-none-win_arm64.whl", hash = "sha256:9183dd615d8df50defa8b1d9a074053891ba39025cf5ae88e8bcb52edcc4bf08"},
- {file = "ruff-0.8.4.tar.gz", hash = "sha256:0d5f89f254836799af1615798caa5f80b7f935d7a670fad66c5007928e57ace8"},
+ {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"},
+ {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"},
+ {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"},
+ {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"},
+ {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"},
+ {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"},
+ {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"},
+ {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"},
+ {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"},
+ {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"},
+ {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"},
+ {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"},
]
[[package]]
@@ -5019,4 +5019,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.0"
python-versions = "3.11.2"
-content-hash = "d66841992dbf0393a2c5160624eade4f0bc14a1ebd443be7b30cef25e0335f15"
+content-hash = "807c866bfcd861d24cbd2a95912e12845ca06a689199504ba971618fa27d3b62"
diff --git a/pyproject.toml b/pyproject.toml
index b95f67bad..f74479c75 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -87,7 +87,7 @@ newrelic = "^10.4.0"
[tool.poetry.group.dev.dependencies]
black = "^24.8.0"
isort = "^5.13.2"
-ruff = "^0.8.4"
+ruff = "^0.8.6"
pre-commit = "^3.8.0"
[tool.isort]
From 6c7c2ff5dfda25bf59d0b1b06e02f8d4f0833b05 Mon Sep 17 00:00:00 2001
From: Altafur Rahman
Date: Mon, 6 Jan 2025 21:54:11 +0600
Subject: [PATCH 52/52] feat: add ContributorStats model for tracking
contributor activity (#3208)
---
website/migrations/0179_contributorstats.py | 60 +++++++++++++++++++++
website/models.py | 40 ++++++++++----
2 files changed, 89 insertions(+), 11 deletions(-)
create mode 100644 website/migrations/0179_contributorstats.py
diff --git a/website/migrations/0179_contributorstats.py b/website/migrations/0179_contributorstats.py
new file mode 100644
index 000000000..08fea502b
--- /dev/null
+++ b/website/migrations/0179_contributorstats.py
@@ -0,0 +1,60 @@
+# Generated by Django 5.1.3 on 2025-01-06 13:17
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("website", "0178_alter_ip_agent"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="ContributorStats",
+ fields=[
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("date", models.DateField()),
+ ("commits", models.PositiveIntegerField(default=0)),
+ ("issues_opened", models.PositiveIntegerField(default=0)),
+ ("issues_closed", models.PositiveIntegerField(default=0)),
+ ("pull_requests", models.PositiveIntegerField(default=0)),
+ ("comments", models.PositiveIntegerField(default=0)),
+ (
+ "granularity",
+ models.CharField(
+ choices=[("day", "Day"), ("month", "Month")],
+ default="day",
+ max_length=10,
+ ),
+ ),
+ (
+ "contributor",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="stats",
+ to="website.contributor",
+ ),
+ ),
+ (
+ "repo",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="stats",
+ to="website.repo",
+ ),
+ ),
+ ],
+ options={
+ "unique_together": {("contributor", "repo", "date", "granularity")},
+ },
+ ),
+ ]
diff --git a/website/models.py b/website/models.py
index a9029ae03..4e4735a7b 100644
--- a/website/models.py
+++ b/website/models.py
@@ -935,17 +935,6 @@ def __str__(self):
return self.name
-# class ContributorStats(models.Model):
-# username = models.CharField(max_length=255, unique=True)
-# commits = models.IntegerField(default=0)
-# issues_opened = models.IntegerField(default=0)
-# issues_closed = models.IntegerField(default=0)
-# prs = models.IntegerField(default=0)
-# comments = models.IntegerField(default=0)
-# assigned_issues = models.IntegerField(default=0)
-# created = models.DateTimeField(auto_now_add=True)
-
-
class Contribution(models.Model):
CONTRIBUTION_TYPES = [
("commit", "Commit"),
@@ -1316,3 +1305,32 @@ def save(self, *args, **kwargs):
def __str__(self):
return f"{self.project.name}/{self.name}"
+
+
+class ContributorStats(models.Model):
+ contributor = models.ForeignKey(Contributor, on_delete=models.CASCADE, related_name="stats")
+ repo = models.ForeignKey(Repo, on_delete=models.CASCADE, related_name="stats")
+
+ # This will represent either a specific day or the first day of a month.
+ date = models.DateField()
+
+ # Store counts
+ commits = models.PositiveIntegerField(default=0)
+ issues_opened = models.PositiveIntegerField(default=0)
+ issues_closed = models.PositiveIntegerField(default=0)
+ pull_requests = models.PositiveIntegerField(default=0)
+ comments = models.PositiveIntegerField(default=0)
+
+ # "day" for daily entries, "month" for monthly entries
+ granularity = models.CharField(
+ max_length=10, choices=[("day", "Day"), ("month", "Month")], default="day"
+ )
+
+ class Meta:
+ # You can't have two different stats for the same date+granularity
+ unique_together = ("contributor", "repo", "date", "granularity")
+
+ def __str__(self):
+ return (
+ f"{self.contributor.name} in {self.repo.name} " f"on {self.date} [{self.granularity}]"
+ )