-
-
Notifications
You must be signed in to change notification settings - Fork 291
feat: Add Contribution Heatmap to Chapter and Project Pages #2674
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 38 commits
6c01854
0dabbb0
022708c
ba82c05
7d8bae7
35054a5
a19457d
9af40eb
fef78d2
f535ab0
f82b793
50b314c
de69692
7315360
4b7cae1
080d138
77a04d4
37320b5
7bb15d3
bb03cec
a3ae3f1
c4d6aab
47014ad
57a4af0
af7c1d6
78e1553
cc8c487
8b384b8
ecff8f5
9f08319
98a4e34
0e8dbde
0fad13f
4b4135f
2f7e3db
b2747d1
7ca8cd0
bd2bc94
27025a4
e78fd23
5d08daf
4f69257
4058c9d
814f38c
d7a3556
573476b
0cf7e93
0999457
5c609ff
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,281 @@ | ||
| """Management command to aggregate contributions for chapters and projects.""" | ||
|
|
||
| from datetime import datetime, timedelta | ||
|
|
||
| from django.core.management.base import BaseCommand | ||
| from django.utils import timezone | ||
|
|
||
| from apps.github.models.commit import Commit | ||
| from apps.github.models.issue import Issue | ||
| from apps.github.models.pull_request import PullRequest | ||
| from apps.github.models.release import Release | ||
| from apps.owasp.models.chapter import Chapter | ||
| from apps.owasp.models.project import Project | ||
|
|
||
|
|
||
| class Command(BaseCommand): | ||
| """Aggregate contribution data for chapters and projects.""" | ||
|
|
||
| help = "Aggregate contributions (commits, issues, PRs, releases) for chapters and projects" | ||
|
|
||
| def add_arguments(self, parser): | ||
| """Add command arguments.""" | ||
| parser.add_argument( | ||
| "--entity-type", | ||
| type=str, | ||
| choices=["chapter", "project", "both"], | ||
| default="both", | ||
| help="Entity type to aggregate: chapter, project, or both", | ||
| ) | ||
| parser.add_argument( | ||
| "--days", | ||
| type=int, | ||
| default=365, | ||
| help="Number of days to look back for contributions (default: 365)", | ||
| ) | ||
| parser.add_argument( | ||
| "--key", | ||
| type=str, | ||
| help="Specific chapter or project key to aggregate", | ||
| ) | ||
| parser.add_argument( | ||
| "--offset", | ||
| type=int, | ||
| default=0, | ||
| help="Skip the first N entities", | ||
| ) | ||
|
|
||
| def _aggregate_contribution_dates( | ||
| self, | ||
| queryset, | ||
| date_field: str, | ||
| contribution_map: dict[str, int], | ||
| ) -> None: | ||
| """Aggregate contribution dates from a queryset into the contribution map. | ||
| Args: | ||
| queryset: Django queryset to aggregate | ||
| date_field: Name of the date field to aggregate on | ||
| contribution_map: Dictionary to update with counts | ||
| """ | ||
| dates = queryset.values_list(date_field, flat=True) | ||
| for date_value in dates: | ||
| if date_value: | ||
| date_key = date_value.date().isoformat() | ||
| contribution_map[date_key] = contribution_map.get(date_key, 0) + 1 | ||
|
Comment on lines
+48
to
+66
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Aggregation loses contribution type information, forcing frontend estimation. To enable accurate frontend display, modify the aggregation to track contribution types: def _aggregate_contribution_dates(
self,
queryset,
date_field: str,
contribution_type: str, # Add type parameter
contribution_map: dict[str, dict[str, int]], # Nested structure
) -> None:
"""Aggregate contribution dates by type into the contribution map.
Args:
queryset: Django queryset to aggregate
date_field: Name of the date field to aggregate on
contribution_type: Type of contribution (commits, issues, pullRequests, releases)
contribution_map: Nested dict {date: {type: count}}
"""
dates = queryset.values_list(date_field, flat=True)
for date_value in dates:
if date_value:
date_key = date_value.date().isoformat()
if date_key not in contribution_map:
contribution_map[date_key] = {}
contribution_map[date_key][contribution_type] = (
contribution_map[date_key].get(contribution_type, 0) + 1
            )

Then update callers:

# In aggregate_chapter_contributions and aggregate_project_contributions
contribution_map: dict[str, dict[str, int]] = {}
self._aggregate_contribution_dates(
Commit.objects.filter(...),
"created_at",
"commits", # Add type
contribution_map,
)
self._aggregate_contribution_dates(
Issue.objects.filter(...),
"created_at",
"issues", # Add type
contribution_map,
)
    # ... repeat for pullRequests and releases

This requires coordinated updates to:
🤖 Prompt for AI Agents |
||
|
|
||
| def aggregate_chapter_contributions( | ||
| self, | ||
| chapter: Chapter, | ||
| start_date: datetime, | ||
| ) -> dict[str, int]: | ||
| """Aggregate contributions for a chapter. | ||
| Args: | ||
| chapter: Chapter instance | ||
| start_date: Start date for aggregation | ||
| Returns: | ||
| Dictionary mapping YYYY-MM-DD to contribution count | ||
| """ | ||
| contribution_map: dict[str, int] = {} | ||
|
|
||
| if not chapter.owasp_repository: | ||
| return contribution_map | ||
|
|
||
| repository = chapter.owasp_repository | ||
|
|
||
| # Aggregate commits | ||
| self._aggregate_contribution_dates( | ||
| Commit.objects.filter( | ||
| repository=repository, | ||
| created_at__gte=start_date, | ||
| ), | ||
| "created_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| # Aggregate issues | ||
| self._aggregate_contribution_dates( | ||
| Issue.objects.filter( | ||
| repository=repository, | ||
| created_at__gte=start_date, | ||
| ), | ||
| "created_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| # Aggregate pull requests | ||
| self._aggregate_contribution_dates( | ||
| PullRequest.objects.filter( | ||
| repository=repository, | ||
| created_at__gte=start_date, | ||
| ), | ||
| "created_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| # Aggregate releases (exclude drafts) | ||
| self._aggregate_contribution_dates( | ||
| Release.objects.filter( | ||
| repository=repository, | ||
| published_at__gte=start_date, | ||
| is_draft=False, | ||
| ), | ||
| "published_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| return contribution_map | ||
|
|
||
| def aggregate_project_contributions( | ||
| self, | ||
| project: Project, | ||
| start_date: datetime, | ||
| ) -> dict[str, int]: | ||
| """Aggregate contributions for a project across all its repositories. | ||
| Args: | ||
| project: Project instance | ||
| start_date: Start date for aggregation | ||
| Returns: | ||
| Dictionary mapping YYYY-MM-DD to contribution count | ||
| """ | ||
| contribution_map: dict[str, int] = {} | ||
|
|
||
| repositories = list(project.repositories.all()) | ||
| if project.owasp_repository: | ||
| repositories.append(project.owasp_repository) | ||
|
|
||
| repository_ids = [repo.id for repo in repositories if repo] | ||
|
|
||
| if not repository_ids: | ||
| return contribution_map | ||
|
|
||
| # Aggregate commits | ||
| self._aggregate_contribution_dates( | ||
| Commit.objects.filter( | ||
| repository_id__in=repository_ids, | ||
| created_at__gte=start_date, | ||
| ), | ||
| "created_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| # Aggregate issues | ||
| self._aggregate_contribution_dates( | ||
| Issue.objects.filter( | ||
| repository_id__in=repository_ids, | ||
| created_at__gte=start_date, | ||
| ), | ||
| "created_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| # Aggregate pull requests | ||
| self._aggregate_contribution_dates( | ||
| PullRequest.objects.filter( | ||
| repository_id__in=repository_ids, | ||
| created_at__gte=start_date, | ||
| ), | ||
| "created_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| # Aggregate releases (exclude drafts) | ||
| self._aggregate_contribution_dates( | ||
| Release.objects.filter( | ||
| repository_id__in=repository_ids, | ||
| published_at__gte=start_date, | ||
| is_draft=False, | ||
| ), | ||
| "published_at", | ||
| contribution_map, | ||
| ) | ||
|
|
||
| return contribution_map | ||
|
|
||
| def handle(self, *args, **options): | ||
| """Execute the command.""" | ||
| entity_type = options["entity_type"] | ||
| days = options["days"] | ||
| key = options.get("key") | ||
| offset = options["offset"] | ||
|
|
||
| start_date = timezone.now() - timedelta(days=days) | ||
|
|
||
| self.stdout.write( | ||
| self.style.SUCCESS( | ||
| f"Aggregating contributions from {start_date.date()} ({days} days back)", | ||
| ), | ||
| ) | ||
|
|
||
| # Process chapters | ||
| if entity_type in ["chapter", "both"]: | ||
| self._process_chapters(start_date, key, offset) | ||
|
|
||
| # Process projects | ||
| if entity_type in ["project", "both"]: | ||
| self._process_projects(start_date, key, offset) | ||
|
|
||
| self.stdout.write(self.style.SUCCESS("Done!")) | ||
|
|
||
| def _process_chapters(self, start_date, key, offset): | ||
| """Process chapters for contribution aggregation.""" | ||
| chapter_queryset = Chapter.objects.filter(is_active=True) | ||
|
|
||
| if key: | ||
| chapter_queryset = chapter_queryset.filter(key=key) | ||
|
|
||
| if offset: | ||
| chapter_queryset = chapter_queryset[offset:] | ||
|
|
||
| chapter_queryset = chapter_queryset.select_related("owasp_repository") | ||
| chapters = list(chapter_queryset) | ||
| self.stdout.write(f"Processing {len(chapters)} chapters...") | ||
|
|
||
| for chapter in chapters: | ||
| contribution_data = self.aggregate_chapter_contributions( | ||
| chapter, | ||
| start_date, | ||
| ) | ||
| chapter.contribution_data = contribution_data | ||
|
|
||
| if chapters: | ||
| Chapter.bulk_save(chapters, fields=("contribution_data",)) | ||
| self.stdout.write( | ||
| self.style.SUCCESS(f"✓ Updated {len(chapters)} chapters"), | ||
| ) | ||
|
|
||
| def _process_projects(self, start_date, key, offset): | ||
| """Process projects for contribution aggregation.""" | ||
| project_queryset = Project.objects.filter(is_active=True) | ||
|
|
||
| if key: | ||
| project_queryset = project_queryset.filter(key=key) | ||
|
|
||
| if offset: | ||
| project_queryset = project_queryset[offset:] | ||
|
|
||
| project_queryset = project_queryset.select_related( | ||
| "owasp_repository" | ||
| ).prefetch_related("repositories") | ||
| projects = list(project_queryset) | ||
| self.stdout.write(f"Processing {len(projects)} projects...") | ||
|
|
||
| for project in projects: | ||
| contribution_data = self.aggregate_project_contributions( | ||
| project, | ||
| start_date, | ||
| ) | ||
| project.contribution_data = contribution_data | ||
|
|
||
| if projects: | ||
| Project.bulk_save(projects, fields=("contribution_data",)) | ||
| self.stdout.write( | ||
| self.style.SUCCESS(f"✓ Updated {len(projects)} projects"), | ||
| ) | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,32 @@ | ||
| # Generated by Django 5.2.8 on 2025-11-16 18:18 | ||
|
|
||
| from django.db import migrations, models | ||
|
|
||
|
|
||
class Migration(migrations.Migration):
    """Add the contribution_data JSON field to Chapter and Project.

    Backs the contribution heatmap: each field stores a mapping of
    YYYY-MM-DD date strings to daily contribution counts, populated by the
    aggregation management command.
    """

    dependencies = [
        ("owasp", "0065_memberprofile_linkedin_page_id"),
    ]

    operations = [
        migrations.AddField(
            model_name="chapter",
            name="contribution_data",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text="Daily contribution counts (YYYY-MM-DD -> count mapping)",
                verbose_name="Contribution Data",
            ),
        ),
        migrations.AddField(
            model_name="project",
            name="contribution_data",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text="Daily contribution counts (YYYY-MM-DD -> count mapping)",
                verbose_name="Contribution Data",
            ),
        ),
    ]
Uh oh!
There was an error while loading. Please reload this page.