import time
from datetime import datetime, timedelta

import pytz
import requests
from dateutil.relativedelta import relativedelta
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction

from website.models import Contributor, ContributorStats, Repo

# GitHub timestamps look like "2024-01-31T12:34:56Z"
GITHUB_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"


class Command(BaseCommand):
    """Fetch contributor activity from the GitHub REST API and store it as
    ContributorStats rows.

    Two granularities are maintained per repository:
      * "day"   — one row per (contributor, date) for the current month;
                  existing current-month rows are wiped and rebuilt each run.
      * "month" — one aggregated row per (contributor, month) for every
                  completed month since the repository was created; months
                  already stored are not re-fetched.
    """

    help = "Update contributor statistics with daily and monthly granularity"

    def add_arguments(self, parser):
        parser.add_argument(
            "--repo_id",
            type=int,
            help="Repository ID from the database (omit to update every repository)",
        )

    def handle(self, *args, **options):
        """Entry point: update one repository, or all of them when no id is given."""
        repo_id = options.get("repo_id")

        if repo_id:
            try:
                repo = Repo.objects.get(id=repo_id)
            except Repo.DoesNotExist:
                raise CommandError(f"Repository with ID {repo_id} not found")
            self.update_stats_for_repo(repo)
        else:
            # Previously the no-argument invocation silently did nothing
            # (the loop below was commented out); fall back to refreshing
            # every repository instead.
            for repo in Repo.objects.all():
                self.update_stats_for_repo(repo)

    # ------------------------------------------------------------------
    # GitHub API helpers (shared by the daily and monthly paths)
    # ------------------------------------------------------------------

    def github_headers(self):
        """Auth + media-type headers used on every GitHub API request."""
        return {
            "Authorization": f"token {settings.GITHUB_TOKEN}",
            "Accept": "application/vnd.github.v3+json",
        }

    def parse_github_url(self, url):
        """Return ``(owner, repo_name)`` parsed from a GitHub repository URL."""
        parts = url.rstrip("/").split("/")
        return parts[-2], parts[-1]

    def parse_gh_date(self, value):
        """Convert a GitHub ISO-8601 timestamp string to a ``date``."""
        return datetime.strptime(value, GITHUB_DATE_FORMAT).date()

    def get_paginated_data(self, url, params=None):
        """Fetch every page of a GitHub list endpoint.

        Follows the RFC 5988 ``Link`` header for pagination and sleeps
        through rate-limit windows. Returns the concatenated JSON payloads;
        on any other API error it logs a warning and returns what it has.
        """
        all_data = []
        headers = self.github_headers()
        while url:
            response = requests.get(url, headers=headers, params=params)

            if response.status_code == 403:
                # Only retry when this really is a rate-limit 403. A plain
                # permission 403 has no exhausted quota, and retrying it
                # would loop forever (reset header defaults to 0 -> 1s naps).
                if response.headers.get("X-RateLimit-Remaining") == "0":
                    reset_timestamp = int(response.headers.get("X-RateLimit-Reset", 0))
                    sleep_time = max(reset_timestamp - time.time(), 0) + 1
                    self.stdout.write(f"Rate limit exceeded. Waiting {sleep_time} seconds...")
                    time.sleep(sleep_time)
                    continue
                self.stdout.write(self.style.WARNING(f"Access forbidden: {response.text}"))
                break

            if response.status_code != 200:
                self.stdout.write(
                    self.style.WARNING(f"API error: {response.status_code} - {response.text}")
                )
                break

            all_data.extend(response.json())

            next_url = None
            if "Link" in response.headers:
                links = requests.utils.parse_header_links(response.headers["Link"])
                next_url = next((link["url"] for link in links if link["rel"] == "next"), None)
            if not next_url:
                break
            url = next_url
            params = None  # next_url already carries the query string

        return all_data

    # ------------------------------------------------------------------
    # Per-repository driver
    # ------------------------------------------------------------------

    def update_stats_for_repo(self, repo):
        """Rebuild current-month daily stats and backfill monthly stats."""
        self.stdout.write(f"Updating stats for repository: {repo.name}")

        owner, repo_name = self.parse_github_url(repo.repo_url)

        today = datetime.now(pytz.UTC).date()
        current_month_start = today.replace(day=1)

        # Daily stats for the current month are rebuilt from scratch.
        self.delete_existing_daily_stats(repo, current_month_start)
        daily_stats = self.fetch_contributor_stats(owner, repo_name, current_month_start, today)
        self.store_daily_stats(repo, daily_stats)

        # Monthly stats are backfilled incrementally.
        self.update_monthly_stats(repo, current_month_start)

    def delete_existing_daily_stats(self, repo, current_month_start):
        """Delete existing daily stats for the current month."""
        with transaction.atomic():
            ContributorStats.objects.filter(
                repo=repo, granularity="day", date__gte=current_month_start
            ).delete()

    # ------------------------------------------------------------------
    # Daily statistics
    # ------------------------------------------------------------------

    def fetch_contributor_stats(self, owner, repo_name, start_date, end_date):
        """Collect per-(date, login) activity counts between the given dates.

        Returns ``{(date, login): {commits, issues_opened, issues_closed,
        pull_requests, comments}}``.
        """
        stats = {}
        base = f"https://api.github.com/repos/{owner}/{repo_name}"

        try:
            # 1. Commits. GitHub treats a bare date in "until" as midnight
            # UTC, which would drop commits made ON end_date — extend one
            # day and filter client-side instead.
            commits = self.get_paginated_data(
                f"{base}/commits",
                {
                    "since": start_date.isoformat(),
                    "until": (end_date + timedelta(days=1)).isoformat(),
                    "per_page": 100,
                },
            )
            for commit in commits:
                if commit.get("author") and commit.get("commit", {}).get("author", {}).get("date"):
                    date = self.parse_gh_date(commit["commit"]["author"]["date"])
                    login = commit["author"].get("login")
                    if login and start_date <= date <= end_date:
                        self.increment_stat(stats, date, login, "commits")

            # 2. Issues. "since" filters by *updated* time, so creation and
            # closure dates are re-checked against the window below.
            issues = self.get_paginated_data(
                f"{base}/issues",
                {"state": "all", "since": start_date.isoformat(), "per_page": 100},
            )
            for issue in issues:
                if issue.get("pull_request"):  # the issues endpoint also returns PRs
                    continue
                login = issue.get("user", {}).get("login")
                if not login:
                    continue

                created_date = self.parse_gh_date(issue["created_at"])
                if start_date <= created_date <= end_date:
                    self.increment_stat(stats, created_date, login, "issues_opened")

                if issue.get("closed_at"):
                    closed_date = self.parse_gh_date(issue["closed_at"])
                    if start_date <= closed_date <= end_date:
                        self.increment_stat(stats, closed_date, login, "issues_closed")

            # 3. Pull requests. The /pulls endpoint has no "since" filter;
            # fetch newest-updated first and filter by creation date.
            pulls = self.get_paginated_data(
                f"{base}/pulls",
                {"state": "all", "sort": "updated", "direction": "desc", "per_page": 100},
            )
            for pr in pulls:
                login = pr.get("user", {}).get("login")
                if login:
                    created_date = self.parse_gh_date(pr["created_at"])
                    if start_date <= created_date <= end_date:
                        self.increment_stat(stats, created_date, login, "pull_requests")

            # 4. Issue comments.
            comments = self.get_paginated_data(
                f"{base}/issues/comments",
                {"since": start_date.isoformat(), "per_page": 100},
            )
            for comment in comments:
                login = comment.get("user", {}).get("login")
                if login:
                    comment_date = self.parse_gh_date(comment["created_at"])
                    if start_date <= comment_date <= end_date:
                        self.increment_stat(stats, comment_date, login, "comments")

            return stats

        except requests.exceptions.RequestException as e:
            self.stdout.write(self.style.ERROR(f"Network error: {str(e)}"))
            return {}

    def increment_stat(self, stats, date, login, stat_type):
        """Increment one counter in the per-(date, login) stats mapping."""
        key = (date, login)
        if key not in stats:
            stats[key] = {
                "commits": 0,
                "issues_opened": 0,
                "issues_closed": 0,
                "pull_requests": 0,
                "comments": 0,
            }
        stats[key][stat_type] += 1

    def store_daily_stats(self, repo, stats):
        """Persist one daily ContributorStats row per (date, login)."""
        with transaction.atomic():
            for (date, login), day_stats in stats.items():
                contributor, _ = self.get_or_create_contributor(login)
                ContributorStats.objects.create(
                    contributor=contributor, repo=repo, date=date, granularity="day", **day_stats
                )

    # ------------------------------------------------------------------
    # Monthly statistics
    # ------------------------------------------------------------------

    def update_monthly_stats(self, repo, start_date):
        """Backfill monthly stats for every completed month not yet stored."""
        owner, repo_name = self.parse_github_url(repo.repo_url)

        # Resume after the newest stored month, or start at the repo's
        # creation month on first run.
        last_monthly_stat = (
            ContributorStats.objects.filter(repo=repo, granularity="month")
            .order_by("-date")
            .first()
        )

        if last_monthly_stat:
            current_month_start = (last_monthly_stat.date + relativedelta(months=1)).replace(day=1)
        else:
            try:
                response = requests.get(
                    f"https://api.github.com/repos/{owner}/{repo_name}",
                    headers=self.github_headers(),
                )
                if response.status_code != 200:
                    self.stdout.write(
                        self.style.ERROR(f"Failed to fetch repo data: {response.text}")
                    )
                    return
                repo_created_at = self.parse_gh_date(response.json()["created_at"])
            except Exception as e:
                self.stdout.write(self.style.ERROR(f"Error fetching repo creation date: {str(e)}"))
                return
            current_month_start = repo_created_at.replace(day=1)

        # Process every COMPLETED month, i.e. everything strictly before the
        # first of the current month. (The previous `< last_month_start`
        # bound skipped the most recent completed month by one.)
        first_of_current_month = datetime.now(pytz.UTC).date().replace(day=1)

        while current_month_start < first_of_current_month:
            month_end = current_month_start + relativedelta(months=1) - timedelta(days=1)

            self.stdout.write(f"Fetching stats for month: {current_month_start} to {month_end}")

            monthly_stats = self.fetch_monthly_contributor_stats(
                owner, repo_name, current_month_start, month_end
            )
            if monthly_stats:
                self.store_monthly_stats(repo, current_month_start, monthly_stats)

            current_month_start += relativedelta(months=1)

    def fetch_monthly_contributor_stats(self, owner, repo_name, month_start, month_end):
        """Aggregate one month of activity per login.

        Returns ``{login: {commits, issues_opened, issues_closed,
        pull_requests, comments}}``.
        """
        monthly_stats = {}
        base = f"https://api.github.com/repos/{owner}/{repo_name}"

        try:
            # Fetch the four activity feeds (same endpoints as the daily
            # path, shared pagination helper). All results are re-filtered
            # client-side against [month_start, month_end].
            commits = self.get_paginated_data(
                f"{base}/commits",
                {
                    "since": month_start.isoformat(),
                    # extend past month_end so same-day commits are included
                    "until": (month_end + timedelta(days=1)).isoformat(),
                    "per_page": 100,
                },
            )
            issues = self.get_paginated_data(
                f"{base}/issues",
                {"state": "all", "since": month_start.isoformat(), "per_page": 100},
            )
            # NOTE: /pulls ignores "since"; it is kept harmlessly and the
            # creation-date filter below does the real work.
            pulls = self.get_paginated_data(
                f"{base}/pulls",
                {"state": "all", "since": month_start.isoformat(), "per_page": 100},
            )
            comments = self.get_paginated_data(
                f"{base}/issues/comments",
                {"since": month_start.isoformat(), "per_page": 100},
            )

            for commit in commits:
                if commit.get("author") and commit.get("commit", {}).get("author", {}).get("date"):
                    date = self.parse_gh_date(commit["commit"]["author"]["date"])
                    login = commit["author"].get("login")
                    if login and month_start <= date <= month_end:
                        self.increment_monthly_stat(monthly_stats, login, "commits")

            for issue in issues:
                if issue.get("pull_request"):  # skip PRs surfaced as issues
                    continue
                login = issue.get("user", {}).get("login")
                if not login:
                    continue

                created_date = self.parse_gh_date(issue["created_at"])
                if month_start <= created_date <= month_end:
                    self.increment_monthly_stat(monthly_stats, login, "issues_opened")

                if issue.get("closed_at"):
                    closed_date = self.parse_gh_date(issue["closed_at"])
                    if month_start <= closed_date <= month_end:
                        self.increment_monthly_stat(monthly_stats, login, "issues_closed")

            for pr in pulls:
                login = pr.get("user", {}).get("login")
                if login and month_start <= self.parse_gh_date(pr["created_at"]) <= month_end:
                    self.increment_monthly_stat(monthly_stats, login, "pull_requests")

            for comment in comments:
                login = comment.get("user", {}).get("login")
                if login and month_start <= self.parse_gh_date(comment["created_at"]) <= month_end:
                    self.increment_monthly_stat(monthly_stats, login, "comments")

            return monthly_stats

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"Error fetching monthly stats: {str(e)}"))
            return {}

    def increment_monthly_stat(self, stats, login, stat_type):
        """Increment one counter in the per-login monthly stats mapping."""
        if login not in stats:
            stats[login] = {
                "commits": 0,
                "issues_opened": 0,
                "issues_closed": 0,
                "pull_requests": 0,
                "comments": 0,
            }
        stats[login][stat_type] += 1

    def store_monthly_stats(self, repo, month_start, monthly_stats):
        """Replace and persist monthly ContributorStats rows for one month."""
        with transaction.atomic():
            # Idempotent: drop any previously stored rows for this month.
            ContributorStats.objects.filter(
                repo=repo, granularity="month", date=month_start
            ).delete()

            for login, stats in monthly_stats.items():
                contributor, _ = self.get_or_create_contributor(login)
                ContributorStats.objects.create(
                    contributor=contributor,
                    repo=repo,
                    date=month_start,
                    granularity="month",
                    **stats,
                )

    # ------------------------------------------------------------------
    # Contributor lookup
    # ------------------------------------------------------------------

    def get_or_create_contributor(self, login):
        """Get or create a Contributor for a GitHub login.

        Results are cached per command run so each login costs at most one
        GitHub users-API request (the daily path calls this once per
        (date, login) pair). Returns ``(contributor, created)`` like
        ``get_or_create``.
        """
        cache = getattr(self, "_contributor_cache", None)
        if cache is None:
            cache = self._contributor_cache = {}
        if login in cache:
            return cache[login], False

        response = requests.get(
            f"https://api.github.com/users/{login}", headers=self.github_headers()
        )

        if response.status_code == 200:
            data = response.json()
            contributor, created = Contributor.objects.get_or_create(
                github_id=data["id"],
                defaults={
                    "name": login,
                    "github_url": data["html_url"],
                    "avatar_url": data["avatar_url"],
                    # GitHub reports "User", "Bot" or "Organization";
                    # collapse everything that is not a bot to "User".
                    "contributor_type": "Bot" if data.get("type") == "Bot" else "User",
                    "contributions": 0,  # refreshed by other sync jobs
                },
            )
        else:
            # Fallback: create with minimal information when the lookup fails.
            contributor, created = Contributor.objects.get_or_create(
                name=login,
                defaults={
                    "github_id": 0,
                    "github_url": f"https://github.com/{login}",
                    "avatar_url": "",
                    "contributor_type": "User",
                    "contributions": 0,
                },
            )

        cache[login] = contributor
        return contributor, created
b/website/templates/projects/_contributor_stats_table.html new file mode 100644 index 000000000..e6cb0eec8 --- /dev/null +++ b/website/templates/projects/_contributor_stats_table.html @@ -0,0 +1,102 @@ +{% load humanize %} + +
+ + + + + + + + + + + + + + {% for stat in contributor_stats %} + + + + + + + + + + {% empty %} + + + + {% endfor %} + +
ContributorCommitsIssues OpenedIssues ClosedPull RequestsCommentsTotal Impact
+
+ {{ stat.contributor.name }} +
+
{{ stat.contributor.name }}
+
@{{ stat.contributor.github_id }}
+
+
+
+ {{ stat.commits|default:0|intcomma }} + + {{ stat.issues_opened|default:0|intcomma }} + + {{ stat.issues_closed|default:0|intcomma }} + + {{ stat.pull_requests|default:0|intcomma }} + + {{ stat.comments|default:0|intcomma }} + +
+ {{ stat.impact_level.text }} +
+
No contributor statistics available for this period
+ {% if is_paginated %} +
+
+
+

+ Showing + {{ page_obj.start_index }} + to + {{ page_obj.end_index }} + of + {{ paginator.count }} + contributors +

+
+
+ +
+
+
+ {% endif %} +
diff --git a/website/templates/projects/repo_detail.html b/website/templates/projects/repo_detail.html index 83ed4ab96..acc4ec346 100644 --- a/website/templates/projects/repo_detail.html +++ b/website/templates/projects/repo_detail.html @@ -393,6 +393,145 @@

Top Contributors

+ +
+
+
+

+ + + + Contributor Statistics +

+
+ +
+ +
+ + +
+
+ +
+ + + + + + + + + + + + + + {% for stat in contributor_stats %} + + + + + + + + + + {% empty %} + + + + {% endfor %} + +
ContributorCommitsIssues OpenedIssues ClosedPull RequestsCommentsTotal Impact
+
+ {{ stat.contributor.name }} +
+
{{ stat.contributor.name }}
+
@{{ stat.contributor.github_id }}
+
+
+
+ {{ stat.commits|default:0|intcomma }} + + {{ stat.issues_opened|default:0|intcomma }} + + {{ stat.issues_closed|default:0|intcomma }} + + {{ stat.pull_requests|default:0|intcomma }} + + {{ stat.comments|default:0|intcomma }} + +
+ {{ stat.impact_level.text }} +
+
No contributor statistics available for this period
+ {% if is_paginated %} +
+
+
+

+ Showing + {{ page_obj.start_index }} + to + {{ page_obj.end_index }} + of + {{ paginator.count }} + contributors +

+
+
+ +
+
+
+ {% endif %} +
+
+
@@ -739,6 +878,12 @@

{ + if (!response.ok) throw new Error('Network response was not ok'); + return response.text(); + }) + .then(html => { + tableContainer.innerHTML = html; + // Update URL without page reload + window.history.pushState({}, '', currentUrl.toString()); + + // Re-attach event listeners to new pagination buttons + attachPaginationListeners(); + }) + .catch(error => { + console.error('Error:', error); + tableContainer.classList.remove('opacity-50'); + }) + .finally(() => { + tableContainer.classList.remove('opacity-50'); + }); +} + +// Function to attach pagination event listeners +function attachPaginationListeners() { + document.querySelectorAll('.pagination-link').forEach(button => { + button.addEventListener('click', (e) => { + e.preventDefault(); + const page = e.target.dataset.page; + const timePeriod = document.getElementById('time-period-select').value; + updateContributorStats(timePeriod, page); + }); + }); +} + +// Initial attachment of listeners +document.addEventListener('DOMContentLoaded', attachPaginationListeners); + {% endblock after_js %} diff --git a/website/views/project.py b/website/views/project.py index 9b7384686..322b173b4 100644 --- a/website/views/project.py +++ b/website/views/project.py @@ -3,6 +3,7 @@ import re import socket import time +from calendar import monthrange from datetime import datetime, timedelta from pathlib import Path from urllib.parse import urlparse @@ -11,15 +12,18 @@ # import matplotlib.pyplot as plt import requests +from dateutil.relativedelta import relativedelta from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required, user_passes_test from django.contrib.humanize.templatetags.humanize import naturaltime from django.core.exceptions import ValidationError +from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator from django.core.validators import URLValidator from django.db.models import Q, Sum from django.http import JsonResponse from 
django.shortcuts import redirect, render +from django.utils import timezone from django.utils.dateparse import parse_datetime from django.utils.text import slugify from django.utils.timezone import localtime, now @@ -29,7 +33,15 @@ from website.bitcoin_utils import create_bacon_token from website.forms import GitHubURLForm -from website.models import BaconToken, Contribution, Organization, Project, Repo +from website.models import ( + BaconToken, + Contribution, + Contributor, + ContributorStats, + Organization, + Project, + Repo, +) from website.utils import admin_required # logging.getLogger("matplotlib").setLevel(logging.ERROR) @@ -932,9 +944,150 @@ def get_context_data(self, **kwargs): for c in repo.contributor.all()[:6] ] + # Get time period from request, default to + time_period = self.request.GET.get("time_period", "current_month") + page_number = self.request.GET.get("page", 1) + # if time_period exist in request.post do something + if self.request.method == "POST": + time_period = self.request.POST.get("time_period", "current_month") + page_number = self.request.POST.get("page", 1) + + # Calculate date range based on time period + end_date = timezone.now().date() + start_date = None + + if time_period == "today": + start_date = end_date + elif time_period == "current_month": + start_date = end_date.replace(day=1) + elif time_period == "last_month": + last_month = end_date - relativedelta(months=1) + start_date = last_month.replace(day=1) + end_date = last_month.replace(day=monthrange(last_month.year, last_month.month)[1]) + elif time_period == "last_6_months": + start_date = end_date - relativedelta(months=6) + elif time_period == "last_year": + start_date = end_date - relativedelta(years=1) + elif time_period == "all_time": + # Get repository creation date from GitHub + try: + owner, repo_name = repo.repo_url.rstrip("/").split("/")[-2:] + headers = { + "Authorization": f"token {settings.GITHUB_TOKEN}", + "Accept": "application/vnd.github.v3+json", + } + 
response = requests.get( + f"https://api.github.com/repos/{owner}/{repo_name}", headers=headers + ) + if response.status_code == 200: + repo_data = response.json() + start_date = datetime.strptime( + repo_data["created_at"], "%Y-%m-%dT%H:%M:%SZ" + ).date() + else: + start_date = end_date - relativedelta(years=1) # Fallback to 1 year + except Exception: + start_date = end_date - relativedelta(years=1) # Fallback to 1 year + + # Query contributor stats + stats_query = ContributorStats.objects.filter( + repo=repo, date__gte=start_date, date__lte=end_date + ) + + # Aggregate the stats + stats_query = ( + stats_query.values("contributor") + .annotate( + total_commits=Sum("commits"), + total_issues_opened=Sum("issues_opened"), + total_issues_closed=Sum("issues_closed"), + total_prs=Sum("pull_requests"), + total_comments=Sum("comments"), + ) + .order_by("-total_commits") + ) + + # Calculate impact scores and enrich with contributor details + processed_stats = [] + for stat in stats_query: + contributor = Contributor.objects.get(id=stat["contributor"]) + + # Calculate impact score using weighted values + impact_score = ( + stat["total_commits"] * 5 + + stat["total_prs"] * 3 + + stat["total_issues_opened"] * 2 + + stat["total_issues_closed"] * 2 + + stat["total_comments"] + ) + + # Determine impact level based on score + if impact_score > 200: + impact_level = {"class": "bg-green-100 text-green-800", "text": "High Impact"} + elif impact_score > 100: + impact_level = {"class": "bg-yellow-100 text-yellow-800", "text": "Medium Impact"} + else: + impact_level = {"class": "bg-blue-100 text-blue-800", "text": "Growing Impact"} + + processed_stats.append( + { + "contributor": contributor, + "commits": stat["total_commits"], + "issues_opened": stat["total_issues_opened"], + "issues_closed": stat["total_issues_closed"], + "pull_requests": stat["total_prs"], + "comments": stat["total_comments"], + "impact_score": impact_score, + "impact_level": impact_level, + } + ) + + # Sort 
processed stats by impact score + processed_stats.sort(key=lambda x: x["impact_score"], reverse=True) + + # Set up pagination + paginator = Paginator(processed_stats, 10) # Changed from 2 to 10 entries per page + try: + paginated_stats = paginator.page(page_number) + except PageNotAnInteger: + paginated_stats = paginator.page(1) + except EmptyPage: + paginated_stats = paginator.page(paginator.num_pages) + + # Prepare time period options + time_period_options = [ + ("today", "Today's Data"), + ("current_month", "Current Month"), + ("last_month", "Last Month"), + ("last_6_months", "Last 6 Months"), + ("last_year", "1 Year"), + ("all_time", "All Time"), + ] + + # Add to context + context.update( + { + "contributor_stats": paginated_stats, + "page_obj": paginated_stats, # Add this + "paginator": paginator, # Add this + "time_period": time_period, + "time_period_options": time_period_options, + "start_date": start_date, + "end_date": end_date, + "is_paginated": paginator.num_pages > 1, # Add this + } + ) + return context def post(self, request, *args, **kwargs): + self.object = self.get_object() # Fix the missing object attribute + + if request.headers.get("X-Requested-With") == "XMLHttpRequest": + if "time_period" in request.POST: + context = self.get_context_data() + return render(request, "projects/_contributor_stats_table.html", context) + def get_issue_count(full_name, query, headers): search_url = f"https://api.github.com/search/issues?q=repo:{full_name}+{query}" resp = requests.get(search_url, headers=headers) @@ -1276,4 +1429,28 @@ def get_issue_count(full_name, query, headers): status=400, ) + elif section == "contributor_stats": + try: + repo = self.get_object() + # we have to run a management command to fetch the contributor stats + from django.core.management import call_command + + call_command("update_contributor_stats", "--repo_id", repo.id) + + return JsonResponse( + { + "status": "success", + "message": "Contributor statistics updated successfully", + } 
+ ) + + except Exception as e: + return JsonResponse( + { + "status": "error", + "message": "An unexpected error occurred", + }, + status=500, + ) + return super().post(request, *args, **kwargs)