49 commits
6c01854
feat: add contribution_data field to Chapter and Project models
mrkeshav-05 Nov 18, 2025
0dabbb0
Create migration 0066 to add fields to database
mrkeshav-05 Nov 18, 2025
022708c
add management command to aggregate contributions
mrkeshav-05 Nov 18, 2025
ba82c05
expose contribution_data in GraphQL API
mrkeshav-05 Nov 18, 2025
7d8bae7
refactor: extract contribution date aggregation into a separate metho…
mrkeshav-05 Nov 18, 2025
35054a5
test: add contribution_data to the test's expected field names set.
mrkeshav-05 Nov 18, 2025
a19457d
test: add unit tests for ChapterNode field resolutions and configurat…
mrkeshav-05 Nov 18, 2025
9af40eb
test: add unit tests for owasp_aggregate_contributions management com…
mrkeshav-05 Nov 18, 2025
fef78d2
feat: add contributionData to GraphQL queries
mrkeshav-05 Nov 18, 2025
f535ab0
feat: update TypeScript types for contribution data
mrkeshav-05 Nov 18, 2025
f82b793
feat: add contributionData field to both project and chapter nodes
mrkeshav-05 Nov 18, 2025
50b314c
fixed linting errors
mrkeshav-05 Nov 18, 2025
de69692
fixed failed tests in chapter_test.py
mrkeshav-05 Nov 18, 2025
7315360
fixed backend tests in owasp_aggregate_contributions_test.py
mrkeshav-05 Nov 18, 2025
4b7cae1
feat: add contribution heatmap to chapter details page
mrkeshav-05 Nov 18, 2025
080d138
feat: integrate contribution heatmap into project details page
mrkeshav-05 Nov 18, 2025
77a04d4
feat: enhance contribution heatmap component with isCompact prop
mrkeshav-05 Nov 19, 2025
37320b5
apply variant compact
mrkeshav-05 Nov 19, 2025
7bb15d3
feat: add contribution stats and heatmap to chapter and project detai…
mrkeshav-05 Nov 19, 2025
bb03cec
make chapter page same as project page
mrkeshav-05 Nov 20, 2025
a3ae3f1
reduce the gap between the cards
mrkeshav-05 Nov 20, 2025
c4d6aab
optimize queryset by select_related for owasp_repository
mrkeshav-05 Nov 20, 2025
47014ad
feat: optimize project queryset with select_related and prefetch_related
mrkeshav-05 Nov 20, 2025
57a4af0
chapter contribution aggregation tests
mrkeshav-05 Nov 20, 2025
af7c1d6
fix: update contribution_data test to assert field type
mrkeshav-05 Nov 20, 2025
78e1553
update contribution stats calculation to provide estimated values
mrkeshav-05 Nov 20, 2025
cc8c487
fix: handle null contribution stats in project and chapter details so…
mrkeshav-05 Nov 20, 2025
8b384b8
fixing sonarcloud errors
mrkeshav-05 Nov 20, 2025
ecff8f5
run pnpm lint
mrkeshav-05 Nov 20, 2025
9f08319
adjust layout and formatting
mrkeshav-05 Nov 20, 2025
98a4e34
fixed backend testcases
mrkeshav-05 Nov 20, 2025
0e8dbde
remove redundant code in project and chapter
mrkeshav-05 Nov 20, 2025
0fad13f
make another component for github stats
mrkeshav-05 Nov 20, 2025
4b4135f
fixing sonar issues
mrkeshav-05 Nov 20, 2025
2f7e3db
fixing tests
mrkeshav-05 Nov 20, 2025
b2747d1
remove unused variables
mrkeshav-05 Nov 20, 2025
7ca8cd0
refactor: improve heatmap series generation and chart options
mrkeshav-05 Nov 20, 2025
bd2bc94
fixing sonar cloud issues
mrkeshav-05 Nov 20, 2025
27025a4
fix make check
mrkeshav-05 Nov 21, 2025
e78fd23
fixing sonarcloud issues
mrkeshav-05 Nov 21, 2025
5d08daf
handling null/undefined values
mrkeshav-05 Nov 21, 2025
4f69257
include test identifiers
mrkeshav-05 Nov 21, 2025
4058c9d
add tests for ContributionHeatmap component with various scenarios
mrkeshav-05 Nov 21, 2025
814f38c
add tests for ContributionStats component
mrkeshav-05 Nov 21, 2025
d7a3556
fixing sonarcloud issues
mrkeshav-05 Nov 21, 2025
573476b
sort active chapters and projects
mrkeshav-05 Nov 21, 2025
0cf7e93
added trailing whitespace
mrkeshav-05 Nov 21, 2025
0999457
apply make check
mrkeshav-05 Nov 21, 2025
5c609ff
pnpm run lint -- --fix
mrkeshav-05 Nov 21, 2025
1 change: 1 addition & 0 deletions backend/apps/owasp/api/internal/nodes/chapter.py
@@ -18,6 +18,7 @@ class GeoLocationType:
@strawberry_django.type(
    Chapter,
    fields=[
        "contribution_data",
        "country",
        "is_active",
        "meetup_group",
1 change: 1 addition & 0 deletions backend/apps/owasp/api/internal/nodes/project.py
@@ -23,6 +23,7 @@
@strawberry_django.type(
    Project,
    fields=[
        "contribution_data",
        "contributors_count",
        "created_at",
        "forks_count",
281 changes: 281 additions & 0 deletions backend/apps/owasp/management/commands/owasp_aggregate_contributions.py
@@ -0,0 +1,281 @@
"""Management command to aggregate contributions for chapters and projects."""

from datetime import datetime, timedelta

from django.core.management.base import BaseCommand
from django.utils import timezone

from apps.github.models.commit import Commit
from apps.github.models.issue import Issue
from apps.github.models.pull_request import PullRequest
from apps.github.models.release import Release
from apps.owasp.models.chapter import Chapter
from apps.owasp.models.project import Project


class Command(BaseCommand):
"""Aggregate contribution data for chapters and projects."""

help = "Aggregate contributions (commits, issues, PRs, releases) for chapters and projects"

def add_arguments(self, parser):
"""Add command arguments."""
parser.add_argument(
"--entity-type",
type=str,
choices=["chapter", "project", "both"],
default="both",
help="Entity type to aggregate: chapter, project, or both",
)
parser.add_argument(
"--days",
type=int,
default=365,
help="Number of days to look back for contributions (default: 365)",
)
parser.add_argument(
"--key",
type=str,
help="Specific chapter or project key to aggregate",
)
parser.add_argument(
"--offset",
type=int,
default=0,
help="Skip the first N entities",
)

def _aggregate_contribution_dates(
self,
queryset,
date_field: str,
contribution_map: dict[str, int],
) -> None:
"""Aggregate contribution dates from a queryset into the contribution map.
Args:
queryset: Django queryset to aggregate
date_field: Name of the date field to aggregate on
contribution_map: Dictionary to update with counts
"""
dates = queryset.values_list(date_field, flat=True)
for date_value in dates:
if date_value:
date_key = date_value.date().isoformat()
contribution_map[date_key] = contribution_map.get(date_key, 0) + 1
Comment on lines +48 to +66
⚠️ Potential issue | 🟠 Major

Aggregation loses contribution type information, forcing frontend estimation.

The _aggregate_contribution_dates helper increments a single daily count without tracking the contribution type (commits, issues, PRs, releases). This design forces the frontend to use arbitrary percentage estimates (see lines 72-92 in frontend/src/app/chapters/[chapterKey]/page.tsx and lines 101-121 in frontend/src/app/projects/[projectKey]/page.tsx), producing inaccurate statistics.

To enable accurate frontend display, modify the aggregation to track contribution types:

def _aggregate_contribution_dates(
    self,
    queryset,
    date_field: str,
    contribution_type: str,  # Add type parameter
    contribution_map: dict[str, dict[str, int]],  # Nested structure
) -> None:
    """Aggregate contribution dates by type into the contribution map.
    
    Args:
        queryset: Django queryset to aggregate
        date_field: Name of the date field to aggregate on
        contribution_type: Type of contribution (commits, issues, pullRequests, releases)
        contribution_map: Nested dict {date: {type: count}}
    """
    dates = queryset.values_list(date_field, flat=True)
    for date_value in dates:
        if date_value:
            date_key = date_value.date().isoformat()
            if date_key not in contribution_map:
                contribution_map[date_key] = {}
            contribution_map[date_key][contribution_type] = (
                contribution_map[date_key].get(contribution_type, 0) + 1
            )

Then update callers:

# In aggregate_chapter_contributions and aggregate_project_contributions
contribution_map: dict[str, dict[str, int]] = {}

self._aggregate_contribution_dates(
    Commit.objects.filter(...),
    "created_at",
    "commits",  # Add type
    contribution_map,
)

self._aggregate_contribution_dates(
    Issue.objects.filter(...),
    "created_at", 
    "issues",  # Add type
    contribution_map,
)
# ... repeat for pullRequests and releases

This requires coordinated updates to:

  • Backend model JSONField to store nested structure
  • GraphQL schema/types to expose per-type data
  • Frontend types and aggregation logic to read per-type values
🤖 Prompt for AI Agents
backend/apps/owasp/management/commands/owasp_aggregate_contributions.py around
lines 48-66: the helper currently aggregates dates into a flat count which loses
contribution type info; change the method signature to accept a
contribution_type string and update contribution_map to be a nested dict keyed
by date -> {type: count}, incrementing contribution_map[date][contribution_type]
per item; update all callers (aggregate_chapter_contributions and
aggregate_project_contributions) to pass the appropriate type strings (commits,
issues, pullRequests, releases) and initialize contribution_map as dict[str,
dict[str,int]]; coordinate follow-up changes to the model JSONField to store the
nested structure, update GraphQL types/resolvers to expose per-type counts, and
adjust frontend types/aggregation/display logic to consume the new per-type
data.
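For reference, under the reviewer's proposal the stored structure becomes a nested per-type mapping; a value might look roughly like this (dates, types, and counts are illustrative only):

# Illustrative nested shape: date -> {contribution type -> count}.
contribution_map: dict[str, dict[str, int]] = {
    "2025-11-18": {"commits": 3, "issues": 1, "pullRequests": 2},
    "2025-11-19": {"releases": 1},
}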


    def aggregate_chapter_contributions(
        self,
        chapter: Chapter,
        start_date: datetime,
    ) -> dict[str, int]:
        """Aggregate contributions for a chapter.

        Args:
            chapter: Chapter instance
            start_date: Start date for aggregation

        Returns:
            Dictionary mapping YYYY-MM-DD to contribution count

        """
        contribution_map: dict[str, int] = {}

        if not chapter.owasp_repository:
            return contribution_map

        repository = chapter.owasp_repository

        # Aggregate commits
        self._aggregate_contribution_dates(
            Commit.objects.filter(
                repository=repository,
                created_at__gte=start_date,
            ),
            "created_at",
            contribution_map,
        )

        # Aggregate issues
        self._aggregate_contribution_dates(
            Issue.objects.filter(
                repository=repository,
                created_at__gte=start_date,
            ),
            "created_at",
            contribution_map,
        )

        # Aggregate pull requests
        self._aggregate_contribution_dates(
            PullRequest.objects.filter(
                repository=repository,
                created_at__gte=start_date,
            ),
            "created_at",
            contribution_map,
        )

        # Aggregate releases (exclude drafts)
        self._aggregate_contribution_dates(
            Release.objects.filter(
                repository=repository,
                published_at__gte=start_date,
                is_draft=False,
            ),
            "published_at",
            contribution_map,
        )

        return contribution_map

    def aggregate_project_contributions(
        self,
        project: Project,
        start_date: datetime,
    ) -> dict[str, int]:
        """Aggregate contributions for a project across all its repositories.

        Args:
            project: Project instance
            start_date: Start date for aggregation

        Returns:
            Dictionary mapping YYYY-MM-DD to contribution count

        """
        contribution_map: dict[str, int] = {}

        repositories = list(project.repositories.all())
        if project.owasp_repository:
            repositories.append(project.owasp_repository)

        repository_ids = [repo.id for repo in repositories if repo]

        if not repository_ids:
            return contribution_map

        # Aggregate commits
        self._aggregate_contribution_dates(
            Commit.objects.filter(
                repository_id__in=repository_ids,
                created_at__gte=start_date,
            ),
            "created_at",
            contribution_map,
        )

        # Aggregate issues
        self._aggregate_contribution_dates(
            Issue.objects.filter(
                repository_id__in=repository_ids,
                created_at__gte=start_date,
            ),
            "created_at",
            contribution_map,
        )

        # Aggregate pull requests
        self._aggregate_contribution_dates(
            PullRequest.objects.filter(
                repository_id__in=repository_ids,
                created_at__gte=start_date,
            ),
            "created_at",
            contribution_map,
        )

        # Aggregate releases (exclude drafts)
        self._aggregate_contribution_dates(
            Release.objects.filter(
                repository_id__in=repository_ids,
                published_at__gte=start_date,
                is_draft=False,
            ),
            "published_at",
            contribution_map,
        )

        return contribution_map

    def handle(self, *args, **options):
        """Execute the command."""
        entity_type = options["entity_type"]
        days = options["days"]
        key = options.get("key")
        offset = options["offset"]

        start_date = timezone.now() - timedelta(days=days)

        self.stdout.write(
            self.style.SUCCESS(
                f"Aggregating contributions from {start_date.date()} ({days} days back)",
            ),
        )

        # Process chapters
        if entity_type in ["chapter", "both"]:
            self._process_chapters(start_date, key, offset)

        # Process projects
        if entity_type in ["project", "both"]:
            self._process_projects(start_date, key, offset)

        self.stdout.write(self.style.SUCCESS("Done!"))

    def _process_chapters(self, start_date, key, offset):
        """Process chapters for contribution aggregation."""
        chapter_queryset = Chapter.objects.filter(is_active=True)

        if key:
            chapter_queryset = chapter_queryset.filter(key=key)

        if offset:
            chapter_queryset = chapter_queryset[offset:]

        chapter_queryset = chapter_queryset.select_related("owasp_repository")
        chapters = list(chapter_queryset)
        self.stdout.write(f"Processing {len(chapters)} chapters...")

        for chapter in chapters:
            contribution_data = self.aggregate_chapter_contributions(
                chapter,
                start_date,
            )
            chapter.contribution_data = contribution_data

        if chapters:
            Chapter.bulk_save(chapters, fields=("contribution_data",))
            self.stdout.write(
                self.style.SUCCESS(f"✓ Updated {len(chapters)} chapters"),
            )

    def _process_projects(self, start_date, key, offset):
        """Process projects for contribution aggregation."""
        project_queryset = Project.objects.filter(is_active=True)

        if key:
            project_queryset = project_queryset.filter(key=key)

        if offset:
            project_queryset = project_queryset[offset:]

        project_queryset = project_queryset.select_related(
            "owasp_repository"
        ).prefetch_related("repositories")
        projects = list(project_queryset)
        self.stdout.write(f"Processing {len(projects)} projects...")

        for project in projects:
            contribution_data = self.aggregate_project_contributions(
                project,
                start_date,
            )
            project.contribution_data = contribution_data

        if projects:
            Project.bulk_save(projects, fields=("contribution_data",))
            self.stdout.write(
                self.style.SUCCESS(f"✓ Updated {len(projects)} projects"),
            )
@@ -0,0 +1,32 @@
# Generated by Django 5.2.8 on 2025-11-16 18:18

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("owasp", "0065_memberprofile_linkedin_page_id"),
    ]

    operations = [
        migrations.AddField(
            model_name="chapter",
            name="contribution_data",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text="Daily contribution counts (YYYY-MM-DD -> count mapping)",
                verbose_name="Contribution Data",
            ),
        ),
        migrations.AddField(
            model_name="project",
            name="contribution_data",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text="Daily contribution counts (YYYY-MM-DD -> count mapping)",
                verbose_name="Contribution Data",
            ),
        ),
    ]
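For context, this JSONField stores the flat mapping built by the management command above; a saved value would look roughly like the following (the dates and counts here are made up for illustration):

# Hypothetical contents of contribution_data after aggregation (flat YYYY-MM-DD -> count).
chapter.contribution_data = {
    "2025-11-18": 4,
    "2025-11-19": 1,
    "2025-11-20": 7,
}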
7 changes: 7 additions & 0 deletions backend/apps/owasp/models/chapter.py
@@ -64,6 +64,13 @@ class Meta:
    latitude = models.FloatField(verbose_name="Latitude", blank=True, null=True)
    longitude = models.FloatField(verbose_name="Longitude", blank=True, null=True)

    contribution_data = models.JSONField(
        verbose_name="Contribution Data",
        default=dict,
        blank=True,
        help_text="Daily contribution counts (YYYY-MM-DD -> count mapping)",
    )

    # GRs.
    members = GenericRelation("owasp.EntityMember")

7 changes: 7 additions & 0 deletions backend/apps/owasp/models/project.py
@@ -97,6 +97,13 @@ class Meta:
    custom_tags = models.JSONField(verbose_name="Custom tags", default=list, blank=True)
    track_issues = models.BooleanField(verbose_name="Track issues", default=True)

    contribution_data = models.JSONField(
        verbose_name="Contribution Data",
        default=dict,
        blank=True,
        help_text="Daily contribution counts (YYYY-MM-DD -> count mapping)",
    )

    # GKs.
    members = GenericRelation("owasp.EntityMember")
