From 4da6b27180230f1097236b96714dc098494059f1 Mon Sep 17 00:00:00 2001 From: Tushar <30565750+tushar5526@users.noreply.github.com> Date: Fri, 24 May 2024 16:46:49 +0530 Subject: [PATCH 01/76] Create Dockerfile --- Dockerfile | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..a4f4da0 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,21 @@ +# Use an official Python runtime as a parent image +FROM python:3.11-slim + +# Set the working directory in the container +WORKDIR /app + +# Copy the current directory contents into the container at /app +COPY . /app + +# Install any needed packages specified in requirements.txt +RUN pip install --no-cache-dir -r requirements.txt + +# Make port 5000 available to the world outside this container +EXPOSE 5000 + +# Define environment variable +ENV FLASK_APP=wsgi.py +ENV FLASK_RUN_HOST=0.0.0.0 + +# Run the application +CMD ["flask", "run"] From 0f07f4f4ef78c87b7ce1984245372127f03fd27f Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 11:31:17 +0000 Subject: [PATCH 02/76] feat: dockerize --- Dockerfile | 2 +- docker-compose.yml | 11 +++++++++++ sample.env | 2 ++ wsgi.py | 2 +- 4 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 docker-compose.yml create mode 100644 sample.env diff --git a/Dockerfile b/Dockerfile index a4f4da0..212817c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Use an official Python runtime as a parent image -FROM python:3.11-slim +FROM python:3.12-slim # Set the working directory in the container WORKDIR /app diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..5d41718 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,11 @@ +version: '3.8' + +services: + web: + build: . 
+ ports: + - "5000:5000" + environment: + FLASK_ENV: ${FLASK_ENV:-development} + SUPABASE_URL: ${SUPABASE_URL} + SUPABASE_KEY: ${SUPABASE_KEY} diff --git a/sample.env b/sample.env new file mode 100644 index 0000000..1e4d290 --- /dev/null +++ b/sample.env @@ -0,0 +1,2 @@ +SUPABASE_URL="" +SUPABASE_KEY="" \ No newline at end of file diff --git a/wsgi.py b/wsgi.py index 6026b0f..bf4d905 100644 --- a/wsgi.py +++ b/wsgi.py @@ -1,4 +1,4 @@ from app import app if __name__ == "__main__": - app.run() + app.run(host='0.0.0.0') From df2a400939ea6429b8e59b69cf2beb468599afdf Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:13:01 +0000 Subject: [PATCH 03/76] feat: add ci --- .github/workflows/ci.yml | 147 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..dc0ea6b --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,147 @@ +name: Build + +env: + APP_NAME: CMS-BACKEND-API + PROJECT_NAME: CMS-BACKEND-API + DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + DOCKER_REGISTRY: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + DOT_ENV_FILE_NAME: env.data + + +on: + workflow_dispatch: + push: + branches: + - devops + - dev + - main + +permissions: + contents: write + +jobs: + set_vars: + name: Set Environment Variables + runs-on: ubuntu-latest + outputs: + TAG_LATEST: ${{ steps.tag_values.outputs.TAG_LATEST }} + TAG_ENV_COMMIT: ${{ steps.tag_values.outputs.TAG_ENV_COMMIT }} + APP_ENV: ${{ steps.tag_values.outputs.APP_ENV }} + steps: + - name: Set Docker Image Tags + id: tag_values + run: | + case "${{ github.ref }}" in + 'refs/heads/main') + echo "TAG_LATEST=prod-latest" >> $GITHUB_OUTPUT + echo "TAG_ENV_COMMIT=prod-${GITHUB_SHA:0:5}" >> $GITHUB_OUTPUT + echo "APP_ENV=PROD" >> $GITHUB_OUTPUT + ;; + 'refs/heads/devops') + echo "TAG_LATEST=dev-latest" >> $GITHUB_OUTPUT + echo "TAG_ENV_COMMIT=dev-${GITHUB_SHA:0:5}" >> $GITHUB_OUTPUT + echo "APP_ENV=DEV" >> $GITHUB_OUTPUT + ;; + 'refs/heads/dev') + echo "TAG_LATEST=dev-latest" >> $GITHUB_OUTPUT + echo "TAG_ENV_COMMIT=dev-${GITHUB_SHA:0:5}" >> $GITHUB_OUTPUT + echo "APP_ENV=DEV" >> $GITHUB_OUTPUT + ;; + esac + + build: + name: Build + runs-on: ubuntu-latest + needs: [set_vars] + permissions: + contents: read + packages: write + env: + TAG_LATEST: ${{ needs.set_vars.outputs.TAG_LATEST }} + TAG_ENV_COMMIT: ${{ needs.set_vars.outputs.TAG_ENV_COMMIT }} + SUPABASE_URL: ${{ vars[format('APP_{0}_SUPABASE_URL', needs.set_vars.outputs.APP_ENV)] }} + SUPABASE_KEY: ${{ secrets[format('APP_{0}_SUPABASE_KEY', needs.set_vars.outputs.APP_ENV)] }} + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Login to GitHub Packages + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + + - name: Set Docker Tags + uses: actions/setup-node@v2 + + - name: Read Secrets + run: | + echo "SUPABASE_URL=${SUPABASE_URL}" >> .env + echo "SUPABASE_KEY=${SUPABASE_KEY}" >> .env + mv .env ${{ env.DOT_ENV_FILE_NAME }} + + - name: Copy env file to DEV Server + uses: appleboy/scp-action@v0.1.7 + if: needs.set_vars.outputs.APP_ENV == 'DEV' + with: + host: ${{ vars.DEV_SERVER_HOST }} + username: ${{ vars.DEV_SERVER_USERNAME }} + key: ${{ secrets.DEV_SSH_PRIVATE_KEY }} + port: ${{ vars.DEV_SERVER_PORT }} + source: "${{ env.DOT_ENV_FILE_NAME }}" + target: /root/app/ + + - name: Build ${{ 
env.APP_NAME }} Docker image + run: | + docker build -t ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} . + + - name: Add tag to Docker image + run: | + echo ${{ github.sha }} + docker tag ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }} + + - name: Push Docker image to GitHub Packages + run: | + docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} + docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }} + + deploy: + name: Deployment + runs-on: ubuntu-latest + needs: build + if: github.event_name == 'push' && github.ref_type == 'branch' + + steps: + - name: Deploy to DevOps/Dev Environment + if: github.ref == 'refs/heads/devops' || github.ref == 'refs/heads/dev' + uses: appleboy/ssh-action@v1.0.3 + env: + DOCKER_COMPOSE_PATH: ${{ env.DOCKER_COMPOSE_PATH }} + APP_NAME: ${{ env.APP_NAME }} + DOCKER_REGISTRY: ${{ env.DOCKER_REGISTRY }} + with: + host: ${{ vars.DEV_SERVER_HOST }} + username: ${{ vars.DEV_SERVER_USERNAME }} + key: ${{ secrets.DEV_SSH_PRIVATE_KEY }} + port: ${{ vars.DEV_SERVER_PORT }} + allenvs: true + script_stop: true + envs: DOCKER_COMPOSE_PATH,APP_NAME,DOCKER_REGISTRY + script: | + echo "===============================" + echo "Deploying to Dev environment running on Docker Compose" + echo "===============================" + echo "Docker Compose Path $DOCKER_COMPOSE_PATH" + echo "App Name $APP_NAME" + echo "Docker Registry $DOCKER_REGISTRY" + COMMIT=$(echo $GITHUB_SHA | cut -c1-5) + TAG_ENV_COMMIT="dev-${COMMIT}" + CURRENT_TAG=$(grep "${DOCKER_REGISTRY}:" $DOCKER_COMPOSE_PATH | sed -n 's/.*'"${APP_NAME}"':\([^"]*\).*/\1/p') + echo "Current Tag: $CURRENT_TAG" + echo "Latest Tag: $TAG_ENV_COMMIT" + sudo sed -i 's|'"${DOCKER_REGISTRY}:${CURRENT_TAG}"'|'"${DOCKER_REGISTRY}:${TAG_ENV_COMMIT}"'|g' $DOCKER_COMPOSE_PATH + docker pull $DOCKER_REGISTRY:$TAG_ENV_COMMIT + docker compose -f $DOCKER_COMPOSE_PATH up -d + + - name: Deploy to Prod environment + if: github.ref == 'refs/heads/main' + run: echo "Deploying to Kubernetes" \ No newline at end of file From 98b23df96c596f1dbbb6d33deae720292a011204 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:20:43 +0000 Subject: [PATCH 04/76] fix --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dc0ea6b..8ebbc9e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,7 @@ env: DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml REGISTRY: ghcr.io IMAGE_NAME: ${{ github.repository }} - DOCKER_REGISTRY: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + DOCKER_REGISTRY: ghcr.io/Code4GovTech/DMP-CMS-Backend-API DOT_ENV_FILE_NAME: env.data From 027ba83a87adbc6e6b93f2d0caffdd7bb34c312c Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:29:18 +0000 Subject: [PATCH 05/76] update docker image --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8ebbc9e..f787e9a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,7 @@ env: DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml REGISTRY: ghcr.io IMAGE_NAME: ${{ github.repository }} - DOCKER_REGISTRY: ghcr.io/Code4GovTech/DMP-CMS-Backend-API + DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api DOT_ENV_FILE_NAME: env.data From 5ea444eaa37a76eeb80286225f0128d8c585749f Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:36:40 +0000 Subject: [PATCH 06/76] 
fix image name --- .github/workflows/ci.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f787e9a..67c1c91 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,6 @@ env: DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml REGISTRY: ghcr.io IMAGE_NAME: ${{ github.repository }} - DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api DOT_ENV_FILE_NAME: env.data @@ -92,17 +91,17 @@ jobs: - name: Build ${{ env.APP_NAME }} Docker image run: | - docker build -t ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} . + docker build -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_LATEST }} . - name: Add tag to Docker image run: | echo ${{ github.sha }} - docker tag ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }} + docker tag ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_LATEST }} ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_ENV_COMMIT }} - name: Push Docker image to GitHub Packages run: | - docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} - docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }} + docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_LATEST }} + docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_ENV_COMMIT }} deploy: name: Deployment @@ -117,7 +116,7 @@ jobs: env: DOCKER_COMPOSE_PATH: ${{ env.DOCKER_COMPOSE_PATH }} APP_NAME: ${{ env.APP_NAME }} - DOCKER_REGISTRY: ${{ env.DOCKER_REGISTRY }} + DOCKER_REGISTRY: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} with: host: ${{ vars.DEV_SERVER_HOST }} username: ${{ vars.DEV_SERVER_USERNAME }} From af01457cec970d23d48665f38e8ec5983427d786 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:40:10 +0000 Subject: [PATCH 07/76] fix image name --- .github/workflows/ci.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67c1c91..9c43943 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,8 +4,7 @@ env: APP_NAME: CMS-BACKEND-API PROJECT_NAME: CMS-BACKEND-API DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml - REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository }} + DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api DOT_ENV_FILE_NAME: env.data @@ -91,17 +90,17 @@ jobs: - name: Build ${{ env.APP_NAME }} Docker image run: | - docker build -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_LATEST }} . + docker build -t ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} . 
- name: Add tag to Docker image run: | echo ${{ github.sha }} - docker tag ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_LATEST }} ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_ENV_COMMIT }} + docker tag ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }} - name: Push Docker image to GitHub Packages run: | - docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_LATEST }} - docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG_ENV_COMMIT }} + docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} + docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }} deploy: name: Deployment @@ -116,7 +115,7 @@ jobs: env: DOCKER_COMPOSE_PATH: ${{ env.DOCKER_COMPOSE_PATH }} APP_NAME: ${{ env.APP_NAME }} - DOCKER_REGISTRY: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + DOCKER_REGISTRY: ${{ env.DOCKER_REGISTRY }} with: host: ${{ vars.DEV_SERVER_HOST }} username: ${{ vars.DEV_SERVER_USERNAME }} From eba3970482eeb2e2d6c31ba56fdb6557938bf1f1 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:43:18 +0000 Subject: [PATCH 08/76] push --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9c43943..25213a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,7 +4,7 @@ env: APP_NAME: CMS-BACKEND-API PROJECT_NAME: CMS-BACKEND-API DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml - DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api + DOCKER_REGISTRY: ghcr.io/code4govtech/DMP-CMS-Backend-API DOT_ENV_FILE_NAME: env.data From 186d095a7f8600b2a495f36b4b2276903e87c1f9 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:44:29 +0000 Subject: [PATCH 09/76] fix name --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 25213a2..9c43943 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,7 +4,7 @@ env: APP_NAME: CMS-BACKEND-API PROJECT_NAME: CMS-BACKEND-API DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml - DOCKER_REGISTRY: ghcr.io/code4govtech/DMP-CMS-Backend-API + DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api DOT_ENV_FILE_NAME: env.data From 362ff78e092c0395469e91525e9d92df8bc26fe7 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:47:14 +0000 Subject: [PATCH 10/76] fix --- .github/workflows/ci.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9c43943..0690d6f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,8 +65,15 @@ jobs: - name: Checkout code uses: actions/checkout@v2 - - name: Login to GitHub Packages - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + # - name: Login to GitHub Packages + # run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + + - name: Log in to the Container registry + uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Set Docker Tags uses: actions/setup-node@v2 From 968882f8d9d63d6f0e5199233dbafd5ef2b22889 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:49:26 +0000 Subject: [PATCH 11/76] fix --- 
.github/workflows/ci.yml | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0690d6f..3183052 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,6 +18,8 @@ on: permissions: contents: write + packages: write + jobs: set_vars: @@ -65,15 +67,15 @@ jobs: - name: Checkout code uses: actions/checkout@v2 - # - name: Login to GitHub Packages - # run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + - name: Login to GitHub Packages + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin - - name: Log in to the Container registry - uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + # - name: Log in to the Container registry + # uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 + # with: + # registry: ${{ env.REGISTRY }} + # username: ${{ github.actor }} + # password: ${{ secrets.GITHUB_TOKEN }} - name: Set Docker Tags uses: actions/setup-node@v2 From d414b2be3cf5b412fbe92a59272696f24b0c0624 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:51:08 +0000 Subject: [PATCH 12/76] ficx --- .github/workflows/ci.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3183052..0d45674 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -67,15 +67,15 @@ jobs: - name: Checkout code uses: actions/checkout@v2 - - name: Login to GitHub Packages - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + # - name: Login to GitHub Packages + # run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin - # - name: Log in to the Container registry - # uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 - # with: - # registry: ${{ env.REGISTRY }} - # username: ${{ github.actor }} - # password: ${{ secrets.GITHUB_TOKEN }} + - name: Log in to the Container registry + uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Set Docker Tags uses: actions/setup-node@v2 From 0d29f26d2452e1a23c217ea66faee6282228a1a4 Mon Sep 17 00:00:00 2001 From: Tushar Date: Fri, 24 May 2024 12:53:17 +0000 Subject: [PATCH 13/76] fix --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0d45674..b1dc36e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,6 +4,7 @@ env: APP_NAME: CMS-BACKEND-API PROJECT_NAME: CMS-BACKEND-API DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml + REGISTRY: ghcr.io DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api DOT_ENV_FILE_NAME: env.data From e3124c235a4509ee305bd83f7c6dbbeeb97515ca Mon Sep 17 00:00:00 2001 From: Tushar <30565750+tushar5526@users.noreply.github.com> Date: Sun, 26 May 2024 20:33:02 +0530 Subject: [PATCH 14/76] fix: deploy --- .github/workflows/ci.yml | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b1dc36e..c6acef6 100644 --- 
a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,7 @@ env: DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml REGISTRY: ghcr.io DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api - DOT_ENV_FILE_NAME: env.data + DOT_ENV_FILE_NAME: env.dmp-cms-backend-api on: @@ -135,19 +135,7 @@ jobs: script_stop: true envs: DOCKER_COMPOSE_PATH,APP_NAME,DOCKER_REGISTRY script: | - echo "===============================" - echo "Deploying to Dev environment running on Docker Compose" - echo "===============================" - echo "Docker Compose Path $DOCKER_COMPOSE_PATH" - echo "App Name $APP_NAME" - echo "Docker Registry $DOCKER_REGISTRY" - COMMIT=$(echo $GITHUB_SHA | cut -c1-5) - TAG_ENV_COMMIT="dev-${COMMIT}" - CURRENT_TAG=$(grep "${DOCKER_REGISTRY}:" $DOCKER_COMPOSE_PATH | sed -n 's/.*'"${APP_NAME}"':\([^"]*\).*/\1/p') - echo "Current Tag: $CURRENT_TAG" - echo "Latest Tag: $TAG_ENV_COMMIT" - sudo sed -i 's|'"${DOCKER_REGISTRY}:${CURRENT_TAG}"'|'"${DOCKER_REGISTRY}:${TAG_ENV_COMMIT}"'|g' $DOCKER_COMPOSE_PATH - docker pull $DOCKER_REGISTRY:$TAG_ENV_COMMIT + docker compose pull docker compose -f $DOCKER_COMPOSE_PATH up -d - name: Deploy to Prod environment From 3bb753f04044148609499c83bc664c3cded19a44 Mon Sep 17 00:00:00 2001 From: Tushar <30565750+tushar5526@users.noreply.github.com> Date: Sun, 26 May 2024 20:38:04 +0530 Subject: [PATCH 15/76] fix --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c6acef6..c39491f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -135,6 +135,7 @@ jobs: script_stop: true envs: DOCKER_COMPOSE_PATH,APP_NAME,DOCKER_REGISTRY script: | + echo "Docker Compose Path $DOCKER_COMPOSE_PATH" docker compose pull docker compose -f $DOCKER_COMPOSE_PATH up -d From e2a3a205e1a6768a78a2e197e14ccbdb94b8c681 Mon Sep 17 00:00:00 2001 From: Tushar <30565750+tushar5526@users.noreply.github.com> Date: Sun, 26 May 2024 20:40:57 +0530 Subject: [PATCH 16/76] fix --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c39491f..4143b2a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -136,7 +136,7 @@ jobs: envs: DOCKER_COMPOSE_PATH,APP_NAME,DOCKER_REGISTRY script: | echo "Docker Compose Path $DOCKER_COMPOSE_PATH" - docker compose pull + docker compose -f $DOCKER_COMPOSE_PATH pull docker compose -f $DOCKER_COMPOSE_PATH up -d - name: Deploy to Prod environment From 1167745e3602655b88f714ca9c648b1c8f734351 Mon Sep 17 00:00:00 2001 From: Karan Trehan Date: Mon, 27 May 2024 13:26:23 +0530 Subject: [PATCH 17/76] Update Dockerfile Move to gunicorn command --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 212817c..01538a1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,4 +18,4 @@ ENV FLASK_APP=wsgi.py ENV FLASK_RUN_HOST=0.0.0.0 # Run the application -CMD ["flask", "run"] +CMD ["gunicorn", "wsgi"] From 962065e23a8185df785dcf5073a4e373b36e2e63 Mon Sep 17 00:00:00 2001 From: Karan Trehan Date: Mon, 27 May 2024 14:44:39 +0530 Subject: [PATCH 18/76] Update wsgi.py Application for guinicorn --- wsgi.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/wsgi.py b/wsgi.py index bf4d905..23c7fc5 100644 --- a/wsgi.py +++ b/wsgi.py @@ -1,4 +1,2 @@ -from app import app - -if __name__ == "__main__": - app.run(host='0.0.0.0') +from app import app as application +app = application From 
1b41fc298c2bd8ab711249be090b4c04e9b4d1e5 Mon Sep 17 00:00:00 2001 From: S SASIKUMAR <162121972+sasi2312@users.noreply.github.com> Date: Mon, 27 May 2024 15:12:45 +0530 Subject: [PATCH 19/76] Update Dockerfile --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 01538a1..01c274b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,4 +18,5 @@ ENV FLASK_APP=wsgi.py ENV FLASK_RUN_HOST=0.0.0.0 # Run the application -CMD ["gunicorn", "wsgi"] +CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:5000", "wsgi:app"] + From 6e8fe3b99374900987c47ec09624d79518157647 Mon Sep 17 00:00:00 2001 From: Karan Trehan Date: Mon, 27 May 2024 15:41:32 +0530 Subject: [PATCH 20/76] Update Dockerfile Go back to flask run. --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 01c274b..53f495c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,5 +18,5 @@ ENV FLASK_APP=wsgi.py ENV FLASK_RUN_HOST=0.0.0.0 # Run the application -CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:5000", "wsgi:app"] +CMD ["flask", "run"] From 2bcf8f4a46c6e28af8f9cb4b2c9df74bd3d5e161 Mon Sep 17 00:00:00 2001 From: Karan Trehan Date: Mon, 27 May 2024 15:42:33 +0530 Subject: [PATCH 21/76] Revert wsgi.py --- wsgi.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/wsgi.py b/wsgi.py index 23c7fc5..bf4d905 100644 --- a/wsgi.py +++ b/wsgi.py @@ -1,2 +1,4 @@ -from app import app as application -app = application +from app import app + +if __name__ == "__main__": + app.run(host='0.0.0.0') From bd769dd02426dda37473bc962466683aa98514d1 Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 28 May 2024 11:25:06 +0530 Subject: [PATCH 22/76] API - restructured --- app.py | 144 +++++++++++++++++++++++++++++++++++++++++++---- requirements.txt | 4 +- 2 files changed, 135 insertions(+), 13 deletions(-) diff --git a/app.py b/app.py index 737f120..b759f90 100644 --- a/app.py +++ b/app.py @@ -2,13 +2,21 @@ from db import SupabaseInterface from collections import defaultdict from flasgger import Swagger - +import re,markdown2,requests,os app = Flask(__name__) Swagger(app) +GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') + +headers = { + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {GITHUB_TOKEN}", + "X-GitHub-Api-Version": "2022-11-28" + } + @app.route('/api/greeting', methods=['GET']) def greeting(): """ @@ -135,14 +143,98 @@ def get_issues_by_owner(owner): if not response.data: return jsonify({'error': "No data found"}), 500 data = response.data - data = [{**item, "name": item["owner"]} for item in data] + filtered_data = [{key: item[key] for key in ['owner','body_text']} for item in data] + data = [{**{"name": item.pop("owner"),"description": item.pop("body_text")}, **item} for item in filtered_data] return jsonify(data) except Exception as e: return jsonify({'error': str(e)}), 500 + + + +def find_week_avg(url): + # url = "https://api.github.com/repos/VedantKhairnar/dmp-backend-test-repo/issues/comments" + + response = requests.get(url,headers=headers) + + if response.status_code == 200: + issue_details = response.json() + plain_text_body = markdown2.markdown(issue_details[0]['body']) + + tasks = re.findall(r'\[(x| )\]', plain_text_body) + total_tasks = len(tasks) + completed_tasks = tasks.count('x') + + avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + + #find weekly goal html urls + w_goal_url = None + w_learn_url = None + + for item in issue_details: + if "Weekly Goals" in item['body']: + w_goal_url = 
item['html_url'] + if "Weekly Learnings" in item['body']: + w_learn_url = item['html_url'] + + return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url + + +@app.route('/api/mentors', methods=['GET']) +def find_mentors(url): + response = requests.get(url,headers=headers) + + if response.status_code == 200: + issue_details = response.json() + + issue_body = issue_details['body'] + pattern = r"## Mentors\s*([\s\S]+?)\s*##" + match = re.search(pattern, issue_body) + + if match: + mentors_text = match.group(1).strip() + # Extract individual mentor usernames + mentors = [mentor.strip() for mentor in mentors_text.split(',')] + else: + mentors = [] + api_base_url = "https://api.github.com/users/" + + ment_username = [] + for val in mentors: + url = f"{api_base_url}{val[1:]}" + username = requests.get(url) + ment_username.append(username.json()['login']) + + return mentors,ment_username + else: + return [],[] + +def get_pr_details(url): + try: + issue_url = url + url_parts = issue_url.split("/") + owner = url_parts[4] + repo = url_parts[5] + issue_number = url_parts[7] + + # GitHub API endpoint to get pull requests for the repository + pulls_url = f"https://api.github.com/repos/{owner}/{repo}/pulls" + + # Send GET request to GitHub API with authentication + response = requests.get(pulls_url, headers=headers) + if response.status_code == 200: + pulls = response.json() + return pulls + else: + return [] + + + except Exception as e: + raise Exception + @app.route('/api/issues//', methods=['GET']) def get_issues_by_owner_id(owner, issue): - """ + """ Fetch issues by owner and issue number. --- parameters: @@ -156,6 +248,7 @@ def get_issues_by_owner_id(owner, issue): type: string required: true description: The issue number + responses: 200: description: Issues fetched successfully @@ -170,15 +263,42 @@ def get_issues_by_owner_id(owner, issue): properties: error: type: string - """ - try: - response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() - if not response.data: - return jsonify({'error': "No data found"}), 500 - data = response.data - return jsonify(data) - except Exception as e: - return jsonify({'error': str(e)}), 500 + + """ + try: + response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() + if not response.data: + return jsonify({'error': "No data found"}), 500 + data = response.data + + final_data = [] + + for val in data: + issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) + week_avg ,cont_name,cont_id,w_goal,w_learn = find_week_avg(issue_url) + + mentors,ment_usernames = find_mentors(val['issue_url']) if val['issue_url'] else [],[] + res = { + "name": owner, + "description": None, + "mentor_name": ment_usernames, + "mentor_id": mentors, + "contributor_name":cont_name , + "contributor_id": cont_id, + "org_name": val['owner'], + "org_link": val['repo'], + "weekly_goals_html": w_goal, + "weekly_learnings_html": w_learn, + "overall_progress": week_avg, + "issue_url":val['issue_url'], + "pr_details":get_pr_details(val['issue_url']) + } + + # final_data.append(res) + + return jsonify(res),200 + except Exception as e: + return jsonify({'error': str(e)}), 500 if __name__ == '__main__': app.run(debug=True) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 80a365b..8054db6 100644 --- 
a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,6 @@ httpx==0.27.0 python-dotenv==1.0.1 supabase==2.4.5 gunicorn==22.0.0 -flasgger==0.9.7.1 \ No newline at end of file +flasgger==0.9.7.1 +markdown2==2.4.13 +requests==2.32.2 \ No newline at end of file From a00897fff489693622ec125ae72bb4bce243aecb Mon Sep 17 00:00:00 2001 From: Tushar <30565750+tushar5526@users.noreply.github.com> Date: Tue, 28 May 2024 13:45:42 +0530 Subject: [PATCH 23/76] Create .dockerignore --- .dockerignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..4c49bd7 --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +.env From 741a1cf8cb0bdcf943998c888dcffdb7a6bedfe8 Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 28 May 2024 20:12:14 +0530 Subject: [PATCH 24/76] middleware (auth) added --- app.py | 40 ++++++++++++++++++++++++++++++++-------- 1 file changed, 32 insertions(+), 8 deletions(-) diff --git a/app.py b/app.py index b759f90..b5e8764 100644 --- a/app.py +++ b/app.py @@ -1,4 +1,4 @@ -from flask import Flask, jsonify +from flask import Flask, jsonify,request,url_for from db import SupabaseInterface from collections import defaultdict from flasgger import Swagger @@ -17,8 +17,32 @@ "X-GitHub-Api-Version": "2022-11-28" } -@app.route('/api/greeting', methods=['GET']) -def greeting(): + + +# Define a list of routes that should be protected +protected_routes = ['/greeting', '/get-data', '/issues', '/issues/', '/issues//'] +SECRET_KEY =os.getenv('SECRET_KEY') + +protected_routes = [ + re.compile(r'^/greeting$'), + re.compile(r'^/get-data$'), + re.compile(r'^/issues$'), + re.compile(r'^/issues/[^/]+$'), # Matches '/issues/' + re.compile(r'^/issues/[^/]+/[^/]+$') # Matches '/issues//' +] + +# Before request handler to check for the presence of the secret key +@app.before_request +def check_secret_key(): + for route_pattern in protected_routes: + if route_pattern.match(request.path): + secret_key = request.headers.get('X-Secret-Key') + if secret_key != SECRET_KEY: + return jsonify({'message': 'Unauthorized access'}), 401 + break # Stop checking if the current route matches + +@app.route('/greeting', methods=['GET']) +def greeting(): """ A simple greeting endpoint. --- @@ -37,7 +61,7 @@ def greeting(): } return jsonify(response) -@app.route('/api/get-data', methods=['GET']) +@app.route('/get-data', methods=['GET']) def get_data(): """ Fetch data from Supabase. @@ -82,7 +106,7 @@ def group_by_owner(data): return {"issues":res} -@app.route('/api/issues', methods=['GET']) +@app.route('/issues', methods=['GET']) def get_issues(): """ Fetch all issues and group by owner. @@ -112,7 +136,7 @@ def get_issues(): except Exception as e: return jsonify({'error': str(e)}), 500 -@app.route('/api/issues/', methods=['GET']) +@app.route('/issues/', methods=['GET']) def get_issues_by_owner(owner): """ Fetch issues by owner. @@ -179,7 +203,7 @@ def find_week_avg(url): return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url -@app.route('/api/mentors', methods=['GET']) +@app.route('/mentors', methods=['GET']) def find_mentors(url): response = requests.get(url,headers=headers) @@ -232,7 +256,7 @@ def get_pr_details(url): raise Exception -@app.route('/api/issues//', methods=['GET']) +@app.route('/issues//', methods=['GET']) def get_issues_by_owner_id(owner, issue): """ Fetch issues by owner and issue number. 
From 3ebbf1edd89e613231de64219215027bd7473e92 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 29 May 2024 14:05:40 +0530 Subject: [PATCH 25/76] middleware change --- app.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/app.py b/app.py index b5e8764..035ca82 100644 --- a/app.py +++ b/app.py @@ -31,15 +31,6 @@ re.compile(r'^/issues/[^/]+/[^/]+$') # Matches '/issues//' ] -# Before request handler to check for the presence of the secret key -@app.before_request -def check_secret_key(): - for route_pattern in protected_routes: - if route_pattern.match(request.path): - secret_key = request.headers.get('X-Secret-Key') - if secret_key != SECRET_KEY: - return jsonify({'message': 'Unauthorized access'}), 401 - break # Stop checking if the current route matches @app.route('/greeting', methods=['GET']) def greeting(): @@ -324,5 +315,18 @@ def get_issues_by_owner_id(owner, issue): except Exception as e: return jsonify({'error': str(e)}), 500 + + +# Before request handler to check for the presence of the secret key +@app.before_request +def check_secret_key(): + for route_pattern in protected_routes: + if route_pattern.match(request.path): + secret_key = request.headers.get('X-Secret-Key') + if secret_key != SECRET_KEY: + return jsonify({'message': 'Unauthorized access'}), 401 + break # Stop checking if the current route matches + + if __name__ == '__main__': app.run(debug=True) \ No newline at end of file From 769ac1e5ac820e7e1b3288ce8f166296d38c4da7 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 29 May 2024 21:52:14 +0530 Subject: [PATCH 26/76] API base changes --- app.py | 127 ++++++++-------------------------------- utils.py | 172 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 195 insertions(+), 104 deletions(-) create mode 100644 utils.py diff --git a/app.py b/app.py index 035ca82..e378921 100644 --- a/app.py +++ b/app.py @@ -2,13 +2,13 @@ from db import SupabaseInterface from collections import defaultdict from flasgger import Swagger -import re,markdown2,requests,os +import re,os +from utils import * app = Flask(__name__) Swagger(app) - GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') headers = { @@ -18,7 +18,6 @@ } - # Define a list of routes that should be protected protected_routes = ['/greeting', '/get-data', '/issues', '/issues/', '/issues//'] SECRET_KEY =os.getenv('SECRET_KEY') @@ -79,24 +78,8 @@ def get_data(): except Exception as e: return jsonify({'error': str(e)}), 500 -def group_by_owner(data): - grouped_data = defaultdict(list) - for record in data: - owner = record['owner'] - grouped_data[owner].append(record) - - - #Arrange data as reponse format - res = [] - for val in grouped_data: - dict_ = {} - dict_['org_name'] = val - dict_['issues'] = grouped_data[val] - - res.append(dict_) - - return {"issues":res} + @app.route('/issues', methods=['GET']) def get_issues(): """ @@ -122,8 +105,21 @@ def get_issues(): try: response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute() data = response.data - grouped_data = group_by_owner(data) + + #group data based on issues + grouped_data = defaultdict(list) + for record in data: + issue_url = record['issue_url'] + grouped_data[issue_url].append({ + 'id': record['id'], + 'name': record['body_text'] + }) + + result = [{'issue_url': issue_url, 'issues': issues} for issue_url, issues in grouped_data.items()] + + grouped_data = group_by_owner(result) return jsonify(grouped_data) + except Exception as e: return jsonify({'error': str(e)}), 500 @@ -154,7 
+150,8 @@ def get_issues_by_owner(owner): type: string """ try: - response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).execute() + response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).order('comment_updated_at', desc=True).execute() + if not response.data: return jsonify({'error': "No data found"}), 500 data = response.data @@ -166,86 +163,6 @@ def get_issues_by_owner(owner): -def find_week_avg(url): - # url = "https://api.github.com/repos/VedantKhairnar/dmp-backend-test-repo/issues/comments" - - response = requests.get(url,headers=headers) - - if response.status_code == 200: - issue_details = response.json() - plain_text_body = markdown2.markdown(issue_details[0]['body']) - - tasks = re.findall(r'\[(x| )\]', plain_text_body) - total_tasks = len(tasks) - completed_tasks = tasks.count('x') - - avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 - - #find weekly goal html urls - w_goal_url = None - w_learn_url = None - - for item in issue_details: - if "Weekly Goals" in item['body']: - w_goal_url = item['html_url'] - if "Weekly Learnings" in item['body']: - w_learn_url = item['html_url'] - - return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url - - -@app.route('/mentors', methods=['GET']) -def find_mentors(url): - response = requests.get(url,headers=headers) - - if response.status_code == 200: - issue_details = response.json() - - issue_body = issue_details['body'] - pattern = r"## Mentors\s*([\s\S]+?)\s*##" - match = re.search(pattern, issue_body) - - if match: - mentors_text = match.group(1).strip() - # Extract individual mentor usernames - mentors = [mentor.strip() for mentor in mentors_text.split(',')] - else: - mentors = [] - api_base_url = "https://api.github.com/users/" - - ment_username = [] - for val in mentors: - url = f"{api_base_url}{val[1:]}" - username = requests.get(url) - ment_username.append(username.json()['login']) - - return mentors,ment_username - else: - return [],[] - -def get_pr_details(url): - try: - issue_url = url - url_parts = issue_url.split("/") - owner = url_parts[4] - repo = url_parts[5] - issue_number = url_parts[7] - - # GitHub API endpoint to get pull requests for the repository - pulls_url = f"https://api.github.com/repos/{owner}/{repo}/pulls" - - # Send GET request to GitHub API with authentication - response = requests.get(pulls_url, headers=headers) - if response.status_code == 200: - pulls = response.json() - return pulls - else: - return [] - - - except Exception as e: - raise Exception - @app.route('/issues//', methods=['GET']) def get_issues_by_owner_id(owner, issue): @@ -287,12 +204,14 @@ def get_issues_by_owner_id(owner, issue): data = response.data final_data = [] - for val in data: issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) week_avg ,cont_name,cont_id,w_goal,w_learn = find_week_avg(issue_url) + mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} + + mentors = mentors_data['mentors'] + ment_usernames = mentors_data['mentor_usernames'] - mentors,ment_usernames = find_mentors(val['issue_url']) if val['issue_url'] else [],[] res = { "name": owner, "description": None, diff --git a/utils.py b/utils.py new file mode 100644 index 0000000..1fd5f0a --- /dev/null +++ b/utils.py @@ -0,0 +1,172 @@ +import requests,re,markdown2,os +from collections import defaultdict + 
+GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') + +headers = { + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {GITHUB_TOKEN}", + "X-GitHub-Api-Version": "2022-11-28" + } + + + +def find_org_data(url): + try: + url_parts = url.split("/") + owner = url_parts[4] + repo = url_parts[5] + + # Fetch repository details to get organization info + repo_url = f"https://api.github.com/repos/{owner}/{repo}" + repo_response = requests.get(repo_url, headers=headers) + repo_data = repo_response.json() + if repo_data: + org_name = repo_data['owner']['login'] + org_id = repo_data['owner']['id'] + else: + org_name = None + org_id = None + return {"org_id":org_id,"org_name":org_name} + + except Exception as e: + return {"org_id":None,"org_name":None} + + + + +def get_issue_details(issue_url): + url_parts = issue_url.split("/") + owner = url_parts[4] + repo = url_parts[5] + issue_number = url_parts[6] + + # GitHub API endpoint to get the issue details + issue_api_url = f"https://api.github.com/repos/{owner}/{repo}/issues" + + # Send GET request to GitHub API with authentication + response = requests.get(issue_api_url, headers=headers) + if response.status_code == 200: + issue_data = response.json() + return [{'id': issue['id'], 'name': issue['title']} for issue in issue_data] + else: + return {'id': None, 'name': None} + + +def group_by_owner(data): + res = [] + for record in data: + org_data = find_org_data(record['issue_url']) + dict_ = {} + dict_['org_name'] = org_data['org_name'] + dict_['org_id'] = org_data['org_id'] + dict_['issues'] = get_issue_details(record['issue_url']) + res.append(dict_) + + + org_dict = defaultdict(lambda: {'issues': [], 'org_id': None, 'org_name': None}) + for entry in res: + org_id = entry['org_id'] + org_name = entry['org_name'] + + org_dict[org_id]['issues'].extend(entry['issues']) + org_dict[org_id]['org_id'] = org_id + org_dict[org_id]['org_name'] = org_name + + return list(org_dict.values()) + + +def find_week_avg(url): + + response = requests.get(url,headers=headers) + if response.status_code == 200: + issue_details = response.json() + + for item in issue_details: + + if "Weekly Goals" in item['body']: + w_goal_url = item['html_url'] + plain_text_body = markdown2.markdown(issue_details[0]['body']) + + tasks = re.findall(r'\[(x| )\]', plain_text_body) + total_tasks = len(tasks) + completed_tasks = tasks.count('x') + + avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + + #find weekly goal html urls + w_goal_url = None + w_learn_url = None + else: + avg = 0 + + #find weekly goal html urls + w_goal_url = None + w_learn_url = None + + + if "Weekly Learnings" in item['body']: + w_learn_url = item['html_url'] + + return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url + + +def find_mentors(url): + response = requests.get(url,headers=headers) + + if response.status_code == 200: + issue_details = response.json() + + issue_body = issue_details['body'] + pattern = r"## Mentors\s*([\s\S]+?)\s*##" + match = re.search(pattern, issue_body) + + if match: + mentors_text = match.group(1).strip() + # Extract individual mentor usernames + mentors = [mentor.strip() for mentor in mentors_text.split(',')] + else: + mentors = [] + api_base_url = "https://api.github.com/users/" + + ment_username = [] + for val in mentors: + url = f"{api_base_url}{val[1:]}" + username = requests.get(url) + + ment_username.append(username.json()['login']) + return { + 'mentors': mentors, + 'mentor_usernames': ment_username + } + else: 
+ return { + 'mentors': [], + 'mentor_usernames': [] + } + +def get_pr_details(url): + try: + issue_url = url + url_parts = issue_url.split("/") + owner = url_parts[4] + repo = url_parts[5] + issue_number = url_parts[7] + + # GitHub API endpoint to get pull requests for the repository + pulls_url = f"https://api.github.com/repos/{owner}/{repo}/pulls" + + # Send GET request to GitHub API with authentication + response = requests.get(pulls_url, headers=headers) + if response.status_code == 200: + pulls = response.json() + return pulls + else: + return [] + + + except Exception as e: + raise Exception + + + From cdd84b5f454ee03db03e6c4250f7fcfb30fc54f3 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 14:53:17 +0530 Subject: [PATCH 27/76] api changes & cors added --- app.py | 27 ++++++++++++++--- requirements.txt | 3 +- utils.py | 79 +++++++++++++++++++++++++++++++++++++----------- 3 files changed, 87 insertions(+), 22 deletions(-) diff --git a/app.py b/app.py index e378921..136c126 100644 --- a/app.py +++ b/app.py @@ -4,9 +4,11 @@ from flasgger import Swagger import re,os from utils import * +from flask_cors import CORS -app = Flask(__name__) +app = Flask(__name__) +CORS(app) Swagger(app) GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') @@ -105,7 +107,7 @@ def get_issues(): try: response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute() data = response.data - + #group data based on issues grouped_data = defaultdict(list) for record in data: @@ -206,7 +208,7 @@ def get_issues_by_owner_id(owner, issue): final_data = [] for val in data: issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) - week_avg ,cont_name,cont_id,w_goal,w_learn = find_week_avg(issue_url) + week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs = find_week_avg(issue_url) mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} mentors = mentors_data['mentors'] @@ -214,7 +216,7 @@ def get_issues_by_owner_id(owner, issue): res = { "name": owner, - "description": None, + "description": mentors_data['desc'], "mentor_name": ment_usernames, "mentor_id": mentors, "contributor_name":cont_name , @@ -227,7 +229,24 @@ def get_issues_by_owner_id(owner, issue): "issue_url":val['issue_url'], "pr_details":get_pr_details(val['issue_url']) } + + transformed = {"pr_details": []} + + for pr in res.get("pr_details", []): + transformed["pr_details"].append({ + "id": pr.get("id", ""), + "name": pr.get("title", ""), + "week": pr.get("week", ""), + "link": pr.get("html_url", ""), + "status": pr.get("state", "") + }) + + res['pr_details'] = transformed + # Adding each week as a separate key + # for week in weekby_avgs: + # res.update(week) + # final_data.append(res) return jsonify(res),200 diff --git a/requirements.txt b/requirements.txt index 8054db6..98244a7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,5 @@ supabase==2.4.5 gunicorn==22.0.0 flasgger==0.9.7.1 markdown2==2.4.13 -requests==2.32.2 \ No newline at end of file +requests==2.32.2 +flask-cors==4.0.1 \ No newline at end of file diff --git a/utils.py b/utils.py index 1fd5f0a..dd492c6 100644 --- a/utils.py +++ b/utils.py @@ -48,10 +48,11 @@ def get_issue_details(issue_url): response = requests.get(issue_api_url, headers=headers) if response.status_code == 200: issue_data = response.json() - return [{'id': issue['id'], 'name': issue['title']} for issue in issue_data] + return [{'id': issue['id'], 'name': issue['title']} for issue in 
issue_data if "pull_request" not in issue] else: return {'id': None, 'name': None} + def group_by_owner(data): res = [] @@ -74,14 +75,61 @@ def group_by_owner(data): org_dict[org_id]['org_name'] = org_name return list(org_dict.values()) + + +def find_week_data(issue_details): + try: + #find how many weeks in reponse + weekly_updates = [] + for item in issue_details: + if "Weekly Goals" in item["body"]: + week_match = re.search(r'Week \d+', item["body"]) + if week_match: + weekly_updates.append({ + "id": item["id"], + "val":item, + "week": week_match.group(0) + }) + + val = [] + + for week in weekly_updates: + + plain_text_body = markdown2.markdown(week['val']['body']) + + tasks = re.findall(r'\[(x| )\]', plain_text_body) + total_tasks = len(tasks) + completed_tasks = tasks.count('x') + + avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + + # week['avg'] = avg + # week['val'] = None + week[str(week['week'])+' percentage'] = avg + del week['val'] + del week['id'] + del week['week'] + val.append(week) + + return val + + except Exception as e: + return {} + def find_week_avg(url): response = requests.get(url,headers=headers) if response.status_code == 200: - issue_details = response.json() + issue_details = response.json() + # week_avgs = find_week_data(issue_details) phase 2 + week_avgs = None + + w_learn_url = None + w_goal_url = None + avg = 0 for item in issue_details: if "Weekly Goals" in item['body']: @@ -93,22 +141,12 @@ def find_week_avg(url): completed_tasks = tasks.count('x') avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 - - #find weekly goal html urls - w_goal_url = None - w_learn_url = None - else: - avg = 0 - - #find weekly goal html urls - w_goal_url = None - w_learn_url = None - - + if "Weekly Learnings" in item['body']: w_learn_url = item['html_url'] + - return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url + return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url,week_avgs def find_mentors(url): @@ -119,6 +157,11 @@ def find_mentors(url): issue_body = issue_details['body'] pattern = r"## Mentors\s*([\s\S]+?)\s*##" + disc_pattern = r"## Desc 1\s*([\s\S]+?)\s*##" + disc_match = re.search(disc_pattern, issue_body) + + disc_text = disc_match.group(1).strip() if disc_match else None + match = re.search(pattern, issue_body) if match: @@ -137,12 +180,14 @@ def find_mentors(url): ment_username.append(username.json()['login']) return { 'mentors': mentors, - 'mentor_usernames': ment_username + 'mentor_usernames': ment_username, + 'desc':disc_text } else: return { 'mentors': [], - 'mentor_usernames': [] + 'mentor_usernames': [], + 'desc':None } def get_pr_details(url): From 5feb288d9b97275399f34645a87cd1e711044b3c Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 14:56:01 +0530 Subject: [PATCH 28/76] keys changed --- utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils.py b/utils.py index dd492c6..4360c8d 100644 --- a/utils.py +++ b/utils.py @@ -48,7 +48,7 @@ def get_issue_details(issue_url): response = requests.get(issue_api_url, headers=headers) if response.status_code == 200: issue_data = response.json() - return [{'id': issue['id'], 'name': issue['title']} for issue in issue_data if "pull_request" not in issue] + return [{'id': issue['id'], 'name': issue['title'],'html_url':issue['html_url']} for issue in issue_data if "pull_request" not in issue] else: return {'id': None, 'name': None} From 
78ed104d298f80ce6ab2ce954eeaffebde27f904 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 16:30:41 +0530 Subject: [PATCH 29/76] cors added --- app.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index 136c126..c9fe579 100644 --- a/app.py +++ b/app.py @@ -8,7 +8,8 @@ app = Flask(__name__) -CORS(app) +CORS(app, resources={r"/*": {"origins": "*"}}) + Swagger(app) GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') @@ -256,7 +257,7 @@ def get_issues_by_owner_id(owner, issue): # Before request handler to check for the presence of the secret key -@app.before_request +# @app.before_request def check_secret_key(): for route_pattern in protected_routes: if route_pattern.match(request.path): From a4e774629bf3d6029cc511ffa15e68337f178014 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 16:34:35 +0530 Subject: [PATCH 30/76] changes --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index c9fe579..a908206 100644 --- a/app.py +++ b/app.py @@ -257,7 +257,7 @@ def get_issues_by_owner_id(owner, issue): # Before request handler to check for the presence of the secret key -# @app.before_request +@app.before_request def check_secret_key(): for route_pattern in protected_routes: if route_pattern.match(request.path): From fc559027000600e8c8410d8aa56f553a829fb641 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 16:41:33 +0530 Subject: [PATCH 31/76] cors added - 2 --- app.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/app.py b/app.py index a908206..a166b78 100644 --- a/app.py +++ b/app.py @@ -4,11 +4,13 @@ from flasgger import Swagger import re,os from utils import * -from flask_cors import CORS +from flask_cors import CORS,cross_origin app = Flask(__name__) -CORS(app, resources={r"/*": {"origins": "*"}}) +# CORS(app, resources={r"/*": {"origins": "*"}}) +CORS(app, support_credentials=True) + Swagger(app) @@ -33,7 +35,7 @@ re.compile(r'^/issues/[^/]+/[^/]+$') # Matches '/issues//' ] - +@cross_origin() # added this to my endpoint @app.route('/greeting', methods=['GET']) def greeting(): """ @@ -54,6 +56,7 @@ def greeting(): } return jsonify(response) +@cross_origin() @app.route('/get-data', methods=['GET']) def get_data(): """ @@ -82,7 +85,7 @@ def get_data(): return jsonify({'error': str(e)}), 500 - +@cross_origin() @app.route('/issues', methods=['GET']) def get_issues(): """ @@ -125,7 +128,8 @@ def get_issues(): except Exception as e: return jsonify({'error': str(e)}), 500 - + +@cross_origin() @app.route('/issues/', methods=['GET']) def get_issues_by_owner(owner): """ @@ -166,7 +170,7 @@ def get_issues_by_owner(owner): - +@cross_origin() @app.route('/issues//', methods=['GET']) def get_issues_by_owner_id(owner, issue): """ From 7b821d77773de165f8e4fbadf20a5d1748696a76 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 16:48:38 +0530 Subject: [PATCH 32/76] cors - 3 --- app.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app.py b/app.py index a166b78..a9bb784 100644 --- a/app.py +++ b/app.py @@ -35,8 +35,8 @@ re.compile(r'^/issues/[^/]+/[^/]+$') # Matches '/issues//' ] -@cross_origin() # added this to my endpoint @app.route('/greeting', methods=['GET']) +@cross_origin() # added this to my endpoint def greeting(): """ A simple greeting endpoint. @@ -56,8 +56,8 @@ def greeting(): } return jsonify(response) -@cross_origin() @app.route('/get-data', methods=['GET']) +@cross_origin() def get_data(): """ Fetch data from Supabase. 
@@ -85,8 +85,8 @@ def get_data(): return jsonify({'error': str(e)}), 500 -@cross_origin() @app.route('/issues', methods=['GET']) +@cross_origin() def get_issues(): """ Fetch all issues and group by owner. @@ -129,8 +129,8 @@ def get_issues(): except Exception as e: return jsonify({'error': str(e)}), 500 -@cross_origin() @app.route('/issues/', methods=['GET']) +@cross_origin() def get_issues_by_owner(owner): """ Fetch issues by owner. @@ -170,8 +170,8 @@ def get_issues_by_owner(owner): -@cross_origin() @app.route('/issues//', methods=['GET']) +@cross_origin() def get_issues_by_owner_id(owner, issue): """ Fetch issues by owner and issue number. From 32f6a9284f1fb605ebb8654ff1aafa1a520d6c8c Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 30 May 2024 16:58:12 +0530 Subject: [PATCH 33/76] auth removed --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index a9bb784..8b20b40 100644 --- a/app.py +++ b/app.py @@ -261,7 +261,7 @@ def get_issues_by_owner_id(owner, issue): # Before request handler to check for the presence of the secret key -@app.before_request +# @app.before_request def check_secret_key(): for route_pattern in protected_routes: if route_pattern.match(request.path): From 636e808a3a3863d84ddb3bfbaf5b330d0a849ad6 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 19:39:17 +0530 Subject: [PATCH 34/76] api changes --- app.py | 34 +++++++++----------------- utils.py | 73 ++++++++++++++++++++++++++++++++++++++++---------------- 2 files changed, 64 insertions(+), 43 deletions(-) diff --git a/app.py b/app.py index 8b20b40..c6946eb 100644 --- a/app.py +++ b/app.py @@ -38,19 +38,6 @@ @app.route('/greeting', methods=['GET']) @cross_origin() # added this to my endpoint def greeting(): - """ - A simple greeting endpoint. - --- - responses: - 200: - description: A greeting message - schema: - type: object - properties: - message: - type: string - example: Hello, welcome to my API! - """ response = { 'message': 'Hello, welcome to my API!' 
} @@ -158,13 +145,14 @@ def get_issues_by_owner(owner): """ try: response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).order('comment_updated_at', desc=True).execute() - if not response.data: return jsonify({'error': "No data found"}), 500 - data = response.data - filtered_data = [{key: item[key] for key in ['owner','body_text']} for item in data] - data = [{**{"name": item.pop("owner"),"description": item.pop("body_text")}, **item} for item in filtered_data] - return jsonify(data) + data = response.data[0] + repo_details = get_repo_details(data['owner'],data['repo']) + org_name = repo_details.get('owner', {}).get('login', 'N/A') + org_desc = repo_details.get('description', 'N/A') + return jsonify({"name": org_name, "description": org_desc}) + except Exception as e: return jsonify({'error': str(e)}), 500 @@ -213,7 +201,7 @@ def get_issues_by_owner_id(owner, issue): final_data = [] for val in data: issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) - week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs = find_week_avg(issue_url) + week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} mentors = mentors_data['mentors'] @@ -227,7 +215,7 @@ def get_issues_by_owner_id(owner, issue): "contributor_name":cont_name , "contributor_id": cont_id, "org_name": val['owner'], - "org_link": val['repo'], + "org_link": org_link, "weekly_goals_html": w_goal, "weekly_learnings_html": w_learn, "overall_progress": week_avg, @@ -236,14 +224,14 @@ def get_issues_by_owner_id(owner, issue): } transformed = {"pr_details": []} - + for pr in res.get("pr_details", []): transformed["pr_details"].append({ "id": pr.get("id", ""), "name": pr.get("title", ""), - "week": pr.get("week", ""), + "week": determine_week(pr['created_at']), "link": pr.get("html_url", ""), - "status": pr.get("state", "") + "status": pr.get("state", ""), }) res['pr_details'] = transformed diff --git a/utils.py b/utils.py index 4360c8d..c2f9cf9 100644 --- a/utils.py +++ b/utils.py @@ -1,5 +1,8 @@ import requests,re,markdown2,os from collections import defaultdict +from datetime import datetime, timedelta +from dateutil import parser + GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') @@ -146,7 +149,7 @@ def find_week_avg(url): w_learn_url = item['html_url'] - return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url,week_avgs + return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url,week_avgs,issue_details[0]['user']['html_url'] def find_mentors(url): @@ -190,28 +193,58 @@ def find_mentors(url): 'desc':None } -def get_pr_details(url): - try: - issue_url = url - url_parts = issue_url.split("/") - owner = url_parts[4] - repo = url_parts[5] - issue_number = url_parts[7] +def get_pr_details(url): + try: + issue_url = url + url_parts = issue_url.split("/") + owner = url_parts[4] + repo = url_parts[5] + issue_number = url_parts[7] + + # GitHub API endpoint to get pull requests for the repository + pulls_url = f"https://api.github.com/repos/{owner}/{repo}/pulls" + + # Send GET request to GitHub API with authentication + response = requests.get(pulls_url, headers=headers) + if response.status_code == 200: + pulls = response.json() + return pulls + else: + return [] + + + except Exception as e: + raise Exception + + - # GitHub API endpoint 
to get pull requests for the repository - pulls_url = f"https://api.github.com/repos/{owner}/{repo}/pulls" - # Send GET request to GitHub API with authentication - response = requests.get(pulls_url, headers=headers) +def get_repo_details(owner, repo): + url = f"https://api.github.com/repos/{owner}/{repo}" + response = requests.get(url) if response.status_code == 200: - pulls = response.json() - return pulls + return response.json() else: - return [] + return None + + + +def determine_week(input_date_str, start_date_str='2024-06-11'): + try: + # Convert the start date string to a datetime object + start_date = datetime.strptime(start_date_str, '%Y-%m-%d') + input_date = parser.parse(input_date_str).replace(tzinfo=None) + # Calculate the difference in days + difference_in_days = (input_date - start_date).days + if difference_in_days < 0: + return "Week 0" + week_number = (difference_in_days // 7) + 1 - except Exception as e: - raise Exception - - - + return f"Week {week_number}" + + except Exception as e: + return "Week -1" + + + From 571f27b696ace42348dc4c20feaa57ccecaab9fd Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 19:47:05 +0530 Subject: [PATCH 35/76] middleware added --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index c6946eb..4217a40 100644 --- a/app.py +++ b/app.py @@ -249,7 +249,7 @@ def get_issues_by_owner_id(owner, issue): # Before request handler to check for the presence of the secret key -# @app.before_request +@app.before_request def check_secret_key(): for route_pattern in protected_routes: if route_pattern.match(request.path): From a6dd91c0e6ffaa4d89f4051dea11bccc398ff8d9 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 20:58:38 +0530 Subject: [PATCH 36/76] cors checking --- app.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/app.py b/app.py index 4217a40..73a191f 100644 --- a/app.py +++ b/app.py @@ -8,8 +8,7 @@ app = Flask(__name__) -# CORS(app, resources={r"/*": {"origins": "*"}}) -CORS(app, support_credentials=True) +CORS(app, resources={r"/*": {"origins": "*"}}, support_credentials=True) Swagger(app) @@ -36,7 +35,7 @@ ] @app.route('/greeting', methods=['GET']) -@cross_origin() # added this to my endpoint +@cross_origin(supports_credentials=True) # added this to my endpoint def greeting(): response = { 'message': 'Hello, welcome to my API!' @@ -44,7 +43,7 @@ def greeting(): return jsonify(response) @app.route('/get-data', methods=['GET']) -@cross_origin() +@cross_origin(supports_credentials=True) def get_data(): """ Fetch data from Supabase. @@ -73,7 +72,7 @@ def get_data(): @app.route('/issues', methods=['GET']) -@cross_origin() +@cross_origin(supports_credentials=True) def get_issues(): """ Fetch all issues and group by owner. @@ -117,7 +116,7 @@ def get_issues(): return jsonify({'error': str(e)}), 500 @app.route('/issues/', methods=['GET']) -@cross_origin() +@cross_origin(supports_credentials=True) def get_issues_by_owner(owner): """ Fetch issues by owner. @@ -159,7 +158,7 @@ def get_issues_by_owner(owner): @app.route('/issues//', methods=['GET']) -@cross_origin() +@cross_origin(supports_credentials=True) def get_issues_by_owner_id(owner, issue): """ Fetch issues by owner and issue number. 
From 16632a81fb5facec0c0cd1a01a73be89e873119f Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 21:06:04 +0530 Subject: [PATCH 37/76] change 1 --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 73a191f..0537291 100644 --- a/app.py +++ b/app.py @@ -8,7 +8,7 @@ app = Flask(__name__) -CORS(app, resources={r"/*": {"origins": "*"}}, support_credentials=True) +CORS(app, resources={r"/*": {"origins": "http://localhost:4200"}}, supports_credentials=True) Swagger(app) From 13437024c1460688baab76ac45c8092fec21fbe4 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 21:15:35 +0530 Subject: [PATCH 38/76] changes --- app.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/app.py b/app.py index 0537291..0d6b7e4 100644 --- a/app.py +++ b/app.py @@ -256,7 +256,14 @@ def check_secret_key(): if secret_key != SECRET_KEY: return jsonify({'message': 'Unauthorized access'}), 401 break # Stop checking if the current route matches - + +@app.after_request +def handle_options(response): + response.headers["Access-Control-Allow-Origin"] = "*" + response.headers["Access-Control-Allow-Methods"] = "GET, POST, PUT, DELETE, OPTIONS" + response.headers["Access-Control-Allow-Headers"] = "Content-Type, X-Requested-With" + + return response if __name__ == '__main__': app.run(debug=True) \ No newline at end of file From 770882a4c54b0df0c168590138172105298f380e Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 21:26:26 +0530 Subject: [PATCH 39/76] change 3 --- app.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/app.py b/app.py index 0d6b7e4..a83ae91 100644 --- a/app.py +++ b/app.py @@ -5,6 +5,7 @@ import re,os from utils import * from flask_cors import CORS,cross_origin +from functools import wraps app = Flask(__name__) @@ -41,6 +42,18 @@ def greeting(): 'message': 'Hello, welcome to my API!' } return jsonify(response) + + + +# Custom decorator to validate secret key +def require_secret_key(f): + @wraps(f) + def decorated_function(*args, **kwargs): + secret_key = request.headers.get('X-Secret-Key') + if secret_key != SECRET_KEY: + return jsonify({'message': 'Unauthorized access'}), 401 + return f(*args, **kwargs) + return decorated_function @app.route('/get-data', methods=['GET']) @cross_origin(supports_credentials=True) @@ -73,6 +86,7 @@ def get_data(): @app.route('/issues', methods=['GET']) @cross_origin(supports_credentials=True) +@require_secret_key def get_issues(): """ Fetch all issues and group by owner. 
@@ -248,7 +262,7 @@ def get_issues_by_owner_id(owner, issue): # Before request handler to check for the presence of the secret key -@app.before_request +# @app.before_request def check_secret_key(): for route_pattern in protected_routes: if route_pattern.match(request.path): From 964687aa08e9afc5afa89b67f5bbbb82358a6a31 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 31 May 2024 21:31:48 +0530 Subject: [PATCH 40/76] change 4 --- app.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/app.py b/app.py index a83ae91..31f02c0 100644 --- a/app.py +++ b/app.py @@ -271,13 +271,6 @@ def check_secret_key(): return jsonify({'message': 'Unauthorized access'}), 401 break # Stop checking if the current route matches -@app.after_request -def handle_options(response): - response.headers["Access-Control-Allow-Origin"] = "*" - response.headers["Access-Control-Allow-Methods"] = "GET, POST, PUT, DELETE, OPTIONS" - response.headers["Access-Control-Allow-Headers"] = "Content-Type, X-Requested-With" - - return response if __name__ == '__main__': app.run(debug=True) \ No newline at end of file From 98784aa00ba188bbd3a7c2d6245cf8377bb40de1 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 3 Jun 2024 17:05:52 +0530 Subject: [PATCH 41/76] pr_details removed & body added --- app.py | 6 +++--- utils.py | 26 ++++++++++++++------------ 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/app.py b/app.py index 31f02c0..a962847 100644 --- a/app.py +++ b/app.py @@ -122,7 +122,7 @@ def get_issues(): }) result = [{'issue_url': issue_url, 'issues': issues} for issue_url, issues in grouped_data.items()] - + grouped_data = group_by_owner(result) return jsonify(grouped_data) @@ -232,7 +232,7 @@ def get_issues_by_owner_id(owner, issue): "weekly_goals_html": w_goal, "weekly_learnings_html": w_learn, "overall_progress": week_avg, - "issue_url":val['issue_url'], + "issue_url":val['html_url'], "pr_details":get_pr_details(val['issue_url']) } @@ -247,7 +247,7 @@ def get_issues_by_owner_id(owner, issue): "status": pr.get("state", ""), }) - res['pr_details'] = transformed + res['pr_details'] = transformed['pr_details'] # Adding each week as a separate key # for week in weekby_avgs: diff --git a/utils.py b/utils.py index c2f9cf9..bc24dc7 100644 --- a/utils.py +++ b/utils.py @@ -51,9 +51,9 @@ def get_issue_details(issue_url): response = requests.get(issue_api_url, headers=headers) if response.status_code == 200: issue_data = response.json() - return [{'id': issue['id'], 'name': issue['title'],'html_url':issue['html_url']} for issue in issue_data if "pull_request" not in issue] + return [{'id': issue['id'], 'name': issue['title'],'html_url':issue['html_url'],'issue_number':issue['number']} for issue in issue_data if "pull_request" not in issue] else: - return {'id': None, 'name': None} + return {'id': None, 'name': None ,'html_url':None,'issue_number':None} @@ -68,16 +68,18 @@ def group_by_owner(data): res.append(dict_) - org_dict = defaultdict(lambda: {'issues': [], 'org_id': None, 'org_name': None}) - for entry in res: - org_id = entry['org_id'] - org_name = entry['org_name'] + # org_dict = defaultdict(lambda: {'issues': [], 'org_id': None, 'org_name': None}) + # for entry in res: + # org_id = entry['org_id'] + # org_name = entry['org_name'] - org_dict[org_id]['issues'].extend(entry['issues']) - org_dict[org_id]['org_id'] = org_id - org_dict[org_id]['org_name'] = org_name + # org_dict[org_id]['issues'].extend(entry['issues']) + # org_dict[org_id]['org_id'] = org_id + # org_dict[org_id]['org_name'] = org_name + - return 
list(org_dict.values()) + # return list(org_dict.values()) + return res def find_week_data(issue_details): @@ -136,7 +138,7 @@ def find_week_avg(url): for item in issue_details: if "Weekly Goals" in item['body']: - w_goal_url = item['html_url'] + w_goal_url = item['body'] plain_text_body = markdown2.markdown(issue_details[0]['body']) tasks = re.findall(r'\[(x| )\]', plain_text_body) @@ -146,7 +148,7 @@ def find_week_avg(url): avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 if "Weekly Learnings" in item['body']: - w_learn_url = item['html_url'] + w_learn_url = item['body'] return avg,issue_details[0]['user']['login'],issue_details[0]['user']['id'],w_goal_url,w_learn_url,week_avgs,issue_details[0]['user']['html_url'] From 0a39decd3766b0d018942b9cff1748aaf0ee2e98 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 3 Jun 2024 17:21:03 +0530 Subject: [PATCH 42/76] issue fixes --- app.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/app.py b/app.py index a962847..f6b71bb 100644 --- a/app.py +++ b/app.py @@ -2,7 +2,7 @@ from db import SupabaseInterface from collections import defaultdict from flasgger import Swagger -import re,os +import re,os,traceback from utils import * from flask_cors import CORS,cross_origin from functools import wraps @@ -127,10 +127,12 @@ def get_issues(): return jsonify(grouped_data) except Exception as e: - return jsonify({'error': str(e)}), 500 + error_traceback = traceback.format_exc() + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 @app.route('/issues/', methods=['GET']) @cross_origin(supports_credentials=True) +@require_secret_key def get_issues_by_owner(owner): """ Fetch issues by owner. @@ -162,17 +164,19 @@ def get_issues_by_owner(owner): return jsonify({'error': "No data found"}), 500 data = response.data[0] repo_details = get_repo_details(data['owner'],data['repo']) - org_name = repo_details.get('owner', {}).get('login', 'N/A') - org_desc = repo_details.get('description', 'N/A') + org_name = repo_details['ower']['login'] if repo_details['owner']['login'] else None + org_desc = repo_details['description'] if repo_details['description'] else None return jsonify({"name": org_name, "description": org_desc}) except Exception as e: - return jsonify({'error': str(e)}), 500 + error_traceback = traceback.format_exc() + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 @app.route('/issues//', methods=['GET']) @cross_origin(supports_credentials=True) +@require_secret_key def get_issues_by_owner_id(owner, issue): """ Fetch issues by owner and issue number. 
@@ -257,7 +261,8 @@ def get_issues_by_owner_id(owner, issue): return jsonify(res),200 except Exception as e: - return jsonify({'error': str(e)}), 500 + error_traceback = traceback.format_exc() + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 From 24bced68564c4edc789a29b9121c1c3613a98fa6 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 3 Jun 2024 17:35:03 +0530 Subject: [PATCH 43/76] key change --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index f6b71bb..55606fe 100644 --- a/app.py +++ b/app.py @@ -164,7 +164,7 @@ def get_issues_by_owner(owner): return jsonify({'error': "No data found"}), 500 data = response.data[0] repo_details = get_repo_details(data['owner'],data['repo']) - org_name = repo_details['ower']['login'] if repo_details['owner']['login'] else None + org_name = repo_details['owner']['login'] if repo_details['owner']['login'] else None org_desc = repo_details['description'] if repo_details['description'] else None return jsonify({"name": org_name, "description": org_desc}) From d962e44fbc6853881f72d9e807a5b3534d3ba2c6 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 3 Jun 2024 17:43:16 +0530 Subject: [PATCH 44/76] headers added --- utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils.py b/utils.py index bc24dc7..0a8e085 100644 --- a/utils.py +++ b/utils.py @@ -180,7 +180,7 @@ def find_mentors(url): ment_username = [] for val in mentors: url = f"{api_base_url}{val[1:]}" - username = requests.get(url) + username = requests.get(url,headers=headers) ment_username.append(username.json()['login']) return { @@ -223,7 +223,7 @@ def get_pr_details(url): def get_repo_details(owner, repo): url = f"https://api.github.com/repos/{owner}/{repo}" - response = requests.get(url) + response = requests.get(url,headers=headers) if response.status_code == 200: return response.json() else: From 7366bbbaf6ed7fff67d1546052cdcaf5830bbf2c Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 4 Jun 2024 23:01:04 +0530 Subject: [PATCH 45/76] api logic changes (from supabase) --- app.py | 142 +++++++++++++++++++++++++++++++++++++++---------------- utils.py | 2 +- 2 files changed, 102 insertions(+), 42 deletions(-) diff --git a/app.py b/app.py index 55606fe..300b2d7 100644 --- a/app.py +++ b/app.py @@ -84,6 +84,31 @@ def get_data(): return jsonify({'error': str(e)}), 500 + +@app.route('/v1/issues', methods=['GET']) +def v1get_issues(): + try: + response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute() + data = response.data + + #group data based on issues + grouped_data = defaultdict(list) + for record in data: + issue_url = record['issue_url'] + grouped_data[issue_url].append({ + 'id': record['id'], + 'name': record['body_text'] + }) + + result = [{'issue_url': issue_url, 'issues': issues} for issue_url, issues in grouped_data.items()] + grouped_data = group_by_owner(result) + return jsonify(grouped_data) + + except Exception as e: + error_traceback = traceback.format_exc() + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + + @app.route('/issues', methods=['GET']) @cross_origin(supports_credentials=True) @require_secret_key @@ -109,22 +134,39 @@ def get_issues(): type: string """ try: - response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute() - data = response.data - - #group data based on issues + dmp_issue =SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data + + for i in 
dmp_issue: + val = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('dmp_issue_url',i['repo_url']).execute().data + i['issues'] = val[0] #append first obj ie all are reder same issue + i['org_id'] = val[0]['org_id'] + i['org_name'] = val[0]['org_name'] + + # Create a defaultdict of lists grouped_data = defaultdict(list) - for record in data: - issue_url = record['issue_url'] - grouped_data[issue_url].append({ - 'id': record['id'], - 'name': record['body_text'] - }) + # Group data by 'org_name' + for item in dmp_issue: + grouped_data[item['org_name']].append(item) - result = [{'issue_url': issue_url, 'issues': issues} for issue_url, issues in grouped_data.items()] + response = [] + for org_name, items in grouped_data.items(): + issues = [ + { + "html_url": item['issues']['html_issue_url'], + "id": item['issues']['comment_id'], + "issue_number": item['issues']['issue_number'], + "name": item['issues']['title'] + } + for item in items + ] + + response.append({ + "issues": issues, + "org_id": items[0]['org_id'], + "org_name": org_name + }) - grouped_data = group_by_owner(result) - return jsonify(grouped_data) + return jsonify(response) except Exception as e: error_traceback = traceback.format_exc() @@ -163,10 +205,7 @@ def get_issues_by_owner(owner): if not response.data: return jsonify({'error': "No data found"}), 500 data = response.data[0] - repo_details = get_repo_details(data['owner'],data['repo']) - org_name = repo_details['owner']['login'] if repo_details['owner']['login'] else None - org_desc = repo_details['description'] if repo_details['description'] else None - return jsonify({"name": org_name, "description": org_desc}) + return jsonify({"name": data['org_name'], "description": data['org_description']}) except Exception as e: error_traceback = traceback.format_exc() @@ -210,48 +249,68 @@ def get_issues_by_owner_id(owner, issue): """ try: - response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() + SUPABASE_DB = SupabaseInterface().get_instance() + response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() if not response.data: return jsonify({'error': "No data found"}), 500 data = response.data final_data = [] + w_learn_url,w_goal_url,avg,cont_details = None,None,None,None + for val in data: issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) - week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) - mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} + # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) + # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} - mentors = mentors_data['mentors'] - ment_usernames = mentors_data['mentor_usernames'] + + if "Weekly Goals" in val['body_text'] and not w_goal_url: + w_goal_url = val['body_text'] + plain_text_body = markdown2.markdown(val['body_text']) + + tasks = re.findall(r'\[(x| )\]', plain_text_body) + total_tasks = len(tasks) + completed_tasks = tasks.count('x') + + avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + + if "Weekly Learnings" in val['body_text'] and not w_learn_url: + w_learn_url = val['body_text'] + + # mentors = mentors_data['mentors'] + # ment_usernames = 
mentors_data['mentor_usernames'] + if not cont_details: + cont_details = SUPABASE_DB.client.table('dmp_issues').select('*').eq('repo_url',val['dmp_issue_url']).execute().data res = { "name": owner, - "description": mentors_data['desc'], - "mentor_name": ment_usernames, - "mentor_id": mentors, - "contributor_name":cont_name , - "contributor_id": cont_id, + "description": val['description'], + "mentor_name": val['mentor_name'], + "mentor_id": val['mentor_id'] , + "contributor_name":cont_details[0]['contributor_name'] , + "contributor_id": cont_details[0]['contributor_id'], "org_name": val['owner'], - "org_link": org_link, - "weekly_goals_html": w_goal, - "weekly_learnings_html": w_learn, - "overall_progress": week_avg, + "org_link": val['org_link'], + "weekly_goals_html": w_goal_url, + "weekly_learnings_html": w_learn_url, + "overall_progress": avg, "issue_url":val['html_url'], - "pr_details":get_pr_details(val['issue_url']) + "pr_details":None } - - transformed = {"pr_details": []} - for pr in res.get("pr_details", []): + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).execute() + transformed = {"pr_details": []} + if pr_Data.data: + for pr in pr_Data.data: transformed["pr_details"].append({ - "id": pr.get("id", ""), - "name": pr.get("title", ""), + "id": pr.get("pr_id", ""), + "name": pr.get("meta_data", ""), "week": determine_week(pr['created_at']), "link": pr.get("html_url", ""), - "status": pr.get("state", ""), + "status": pr.get("status", ""), }) - - res['pr_details'] = transformed['pr_details'] + + res['pr_details'] = transformed['pr_details'] # Adding each week as a separate key # for week in weekby_avgs: @@ -259,7 +318,8 @@ def get_issues_by_owner_id(owner, issue): # final_data.append(res) - return jsonify(res),200 + return jsonify(res),200 + except Exception as e: error_traceback = traceback.format_exc() return jsonify({'error': str(e), 'traceback': error_traceback}), 500 diff --git a/utils.py b/utils.py index 0a8e085..a43b3d7 100644 --- a/utils.py +++ b/utils.py @@ -57,7 +57,7 @@ def get_issue_details(issue_url): -def group_by_owner(data): +def group_by_owner(data): res = [] for record in data: org_data = find_org_data(record['issue_url']) From 6328ff76d265486fa27ab59c1d7bebb8de4ab6ca Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 4 Jun 2024 23:54:07 +0530 Subject: [PATCH 46/76] api fix 1st --- app.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/app.py b/app.py index 300b2d7..4ff57aa 100644 --- a/app.py +++ b/app.py @@ -136,16 +136,21 @@ def get_issues(): try: dmp_issue =SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data + updated_issues = [] + for i in dmp_issue: val = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('dmp_issue_url',i['repo_url']).execute().data - i['issues'] = val[0] #append first obj ie all are reder same issue - i['org_id'] = val[0]['org_id'] - i['org_name'] = val[0]['org_name'] + if val!=[]: + i['issues'] = val[0] #append first obj ie all are reder same issue + i['org_id'] = val[0]['org_id'] + i['org_name'] = val[0]['org_name'] + + updated_issues.append(i) # Create a defaultdict of lists grouped_data = defaultdict(list) # Group data by 'org_name' - for item in dmp_issue: + for item in updated_issues: grouped_data[item['org_name']].append(item) response = [] From f46998f8b5e425bd18bc7133e82367148d4b7c50 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 5 Jun 2024 11:30:20 +0530 Subject: [PATCH 47/76] cors origin 
added --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 4ff57aa..b378096 100644 --- a/app.py +++ b/app.py @@ -9,7 +9,7 @@ app = Flask(__name__) -CORS(app, resources={r"/*": {"origins": "http://localhost:4200"}}, supports_credentials=True) +CORS(app,supports_credentials=True) Swagger(app) From bba7ae99c2e5f59333ba3cb7c9f5a0723a40d9a8 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 5 Jun 2024 14:24:35 +0530 Subject: [PATCH 48/76] env keys remaned --- app.py | 4 ++-- db.py | 4 ++-- utils.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/app.py b/app.py index b378096..36b9383 100644 --- a/app.py +++ b/app.py @@ -14,7 +14,7 @@ Swagger(app) -GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') +GITHUB_TOKEN =os.getenv('APP_DEV_GITHUB_TOKEN') headers = { "Accept": "application/vnd.github+json", @@ -25,7 +25,7 @@ # Define a list of routes that should be protected protected_routes = ['/greeting', '/get-data', '/issues', '/issues/', '/issues//'] -SECRET_KEY =os.getenv('SECRET_KEY') +SECRET_KEY =os.getenv('APP_DEV_SECRET_KEY') protected_routes = [ re.compile(r'^/greeting$'), diff --git a/db.py b/db.py index 6abec14..491596d 100644 --- a/db.py +++ b/db.py @@ -19,8 +19,8 @@ def __init__(self): from dotenv import load_dotenv load_dotenv() - SUPABASE_URL = os.getenv('SUPABASE_URL') - SUPABASE_KEY = os.getenv('SUPABASE_KEY') + SUPABASE_URL = os.getenv('APP_DEV_SUPABASE_URL') + SUPABASE_KEY = os.getenv('APP_DEV_SUPABASE_KEY') self.client: Client = create_client(SUPABASE_URL, SUPABASE_KEY) SupabaseInterface._instance = self else: diff --git a/utils.py b/utils.py index a43b3d7..b0730c5 100644 --- a/utils.py +++ b/utils.py @@ -4,7 +4,7 @@ from dateutil import parser -GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') +GITHUB_TOKEN =os.getenv('APP_DEV_GITHUB_TOKEN') headers = { "Accept": "application/vnd.github+json", From 259791057ab3833f752a3eedab681268bc7187f4 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 5 Jun 2024 14:42:47 +0530 Subject: [PATCH 49/76] env renamed --- app.py | 4 ++-- db.py | 4 ++-- utils.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/app.py b/app.py index 36b9383..b378096 100644 --- a/app.py +++ b/app.py @@ -14,7 +14,7 @@ Swagger(app) -GITHUB_TOKEN =os.getenv('APP_DEV_GITHUB_TOKEN') +GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') headers = { "Accept": "application/vnd.github+json", @@ -25,7 +25,7 @@ # Define a list of routes that should be protected protected_routes = ['/greeting', '/get-data', '/issues', '/issues/', '/issues//'] -SECRET_KEY =os.getenv('APP_DEV_SECRET_KEY') +SECRET_KEY =os.getenv('SECRET_KEY') protected_routes = [ re.compile(r'^/greeting$'), diff --git a/db.py b/db.py index 491596d..6abec14 100644 --- a/db.py +++ b/db.py @@ -19,8 +19,8 @@ def __init__(self): from dotenv import load_dotenv load_dotenv() - SUPABASE_URL = os.getenv('APP_DEV_SUPABASE_URL') - SUPABASE_KEY = os.getenv('APP_DEV_SUPABASE_KEY') + SUPABASE_URL = os.getenv('SUPABASE_URL') + SUPABASE_KEY = os.getenv('SUPABASE_KEY') self.client: Client = create_client(SUPABASE_URL, SUPABASE_KEY) SupabaseInterface._instance = self else: diff --git a/utils.py b/utils.py index b0730c5..0a8e085 100644 --- a/utils.py +++ b/utils.py @@ -4,7 +4,7 @@ from dateutil import parser -GITHUB_TOKEN =os.getenv('APP_DEV_GITHUB_TOKEN') +GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') headers = { "Accept": "application/vnd.github+json", @@ -57,7 +57,7 @@ def get_issue_details(issue_url): -def group_by_owner(data): +def group_by_owner(data): res = [] for record in data: org_data = 
find_org_data(record['issue_url']) From 001c4e1bd942aca7be60527e292888fa932132f0 Mon Sep 17 00:00:00 2001 From: S SASIKUMAR <162121972+sasi2312@users.noreply.github.com> Date: Wed, 5 Jun 2024 14:47:31 +0530 Subject: [PATCH 50/76] Update ci.yml --- .github/workflows/ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4143b2a..cbb9c27 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -64,6 +64,7 @@ jobs: TAG_ENV_COMMIT: ${{ needs.set_vars.outputs.TAG_ENV_COMMIT }} SUPABASE_URL: ${{ vars[format('APP_{0}_SUPABASE_URL', needs.set_vars.outputs.APP_ENV)] }} SUPABASE_KEY: ${{ secrets[format('APP_{0}_SUPABASE_KEY', needs.set_vars.outputs.APP_ENV)] }} + SECRET_KEY: ${{ secrets[format('APP_{0}_SECRET_KEY', needs.set_vars.outputs.APP_ENV)] }} steps: - name: Checkout code uses: actions/checkout@v2 @@ -85,6 +86,7 @@ jobs: run: | echo "SUPABASE_URL=${SUPABASE_URL}" >> .env echo "SUPABASE_KEY=${SUPABASE_KEY}" >> .env + echo "SECRET_KEY=${SECRET_KEY}" >> .env mv .env ${{ env.DOT_ENV_FILE_NAME }} - name: Copy env file to DEV Server @@ -141,4 +143,4 @@ jobs: - name: Deploy to Prod environment if: github.ref == 'refs/heads/main' - run: echo "Deploying to Kubernetes" \ No newline at end of file + run: echo "Deploying to Kubernetes" From a536f4f4c916648074cef9c57b1db4302d0a7ea0 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 7 Jun 2024 15:27:19 +0530 Subject: [PATCH 51/76] api issue fix --- app.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/app.py b/app.py index b378096..b3ea0d8 100644 --- a/app.py +++ b/app.py @@ -268,19 +268,19 @@ def get_issues_by_owner_id(owner, issue): # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} - - if "Weekly Goals" in val['body_text'] and not w_goal_url: - w_goal_url = val['body_text'] - plain_text_body = markdown2.markdown(val['body_text']) - - tasks = re.findall(r'\[(x| )\]', plain_text_body) - total_tasks = len(tasks) - completed_tasks = tasks.count('x') - - avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + if val['body_text']: + if "Weekly Goals" in val['body_text'] and not w_goal_url: + w_goal_url = val['body_text'] + plain_text_body = markdown2.markdown(val['body_text']) - if "Weekly Learnings" in val['body_text'] and not w_learn_url: - w_learn_url = val['body_text'] + tasks = re.findall(r'\[(x| )\]', plain_text_body) + total_tasks = len(tasks) + completed_tasks = tasks.count('x') + + avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + + if "Weekly Learnings" in val['body_text'] and not w_learn_url: + w_learn_url = val['body_text'] # mentors = mentors_data['mentors'] # ment_usernames = mentors_data['mentor_usernames'] @@ -299,7 +299,7 @@ def get_issues_by_owner_id(owner, issue): "weekly_goals_html": w_goal_url, "weekly_learnings_html": w_learn_url, "overall_progress": avg, - "issue_url":val['html_url'], + "issue_url":val['html_issue_url'], "pr_details":None } From 7ecd8eb7116cd7cd4d09f40f6a1f055e895d1827 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 10 Jun 2024 15:48:41 +0530 Subject: [PATCH 52/76] auth added --- app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app.py b/app.py index b3ea0d8..086d896 100644 --- a/app.py +++ b/app.py @@ -57,6 +57,7 @@ def decorated_function(*args, **kwargs): @app.route('/get-data', 
methods=['GET']) @cross_origin(supports_credentials=True) +@require_secret_key def get_data(): """ Fetch data from Supabase. @@ -86,6 +87,7 @@ def get_data(): @app.route('/v1/issues', methods=['GET']) +@require_secret_key def v1get_issues(): try: response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute() From b83977d30e43e1c495282eb84301b1a3faf913e1 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 12 Jun 2024 19:48:06 +0530 Subject: [PATCH 53/76] v2 apis --- app.py | 5 +++ v2_app.py | 94 +++++++++++++++++++++++++++++++++++++++++++++++++++++ v2_utils.py | 84 +++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 183 insertions(+) create mode 100644 v2_app.py create mode 100644 v2_utils.py diff --git a/app.py b/app.py index 086d896..e5b785a 100644 --- a/app.py +++ b/app.py @@ -6,6 +6,7 @@ from utils import * from flask_cors import CORS,cross_origin from functools import wraps +from v2_app import v2 app = Flask(__name__) @@ -344,5 +345,9 @@ def check_secret_key(): break # Stop checking if the current route matches + +# Register the v2 Blueprint +app.register_blueprint(v2, url_prefix='/v2') + if __name__ == '__main__': app.run(debug=True) \ No newline at end of file diff --git a/v2_app.py b/v2_app.py new file mode 100644 index 0000000..d75f996 --- /dev/null +++ b/v2_app.py @@ -0,0 +1,94 @@ +import traceback,re +from flask import Blueprint, jsonify, request +import markdown2 +from app import require_secret_key +from db import SupabaseInterface +from utils import determine_week +from v2_utils import define_mentors_data, week_data_formatter + +v2 = Blueprint('v2', __name__) + + +@v2.route('/issues//', methods=['GET']) +@require_secret_key +def get_issues_by_owner_id_v2(owner, issue): + try: + SUPABASE_DB = SupabaseInterface().get_instance() + response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() + if not response.data: + return jsonify({'error': "No data found"}), 500 + data = response.data + + final_data = [] + w_learn_url,w_goal_url,avg,cont_details = None,None,None,None + + for val in data: + issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) + # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) + # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} + + if val['body_text']: + if "Weekly Goals" in val['body_text'] and not w_goal_url: + w_goal_url = val['body_text'] + plain_text_body = markdown2.markdown(val['body_text']) + + tasks = re.findall(r'\[(x| )\]', plain_text_body) + total_tasks = len(tasks) + completed_tasks = tasks.count('x') + + avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 + + if "Weekly Learnings" in val['body_text'] and not w_learn_url: + w_learn_url = val['body_text'] + plain_text_wurl = markdown2.markdown(val['body_text']) + + + # mentors = mentors_data['mentors'] + # ment_usernames = mentors_data['mentor_usernames'] + if not cont_details: + cont_details = SUPABASE_DB.client.table('dmp_issues').select('*').eq('repo_url',val['dmp_issue_url']).execute().data + + + week_data = week_data_formatter(plain_text_body) + res = { + "name": owner, + "description": val['description'], + "mentor": define_mentors_data(val['mentor_name']), + "mentor_id": val['mentor_id'] , + "contributor":define_mentors_data(cont_details[0]['contributor_name']), + "contributor_id": cont_details[0]['contributor_id'], + 
"org": define_mentors_data(val['owner'])[0] if val['owner'] else [], + "weekly_goals_html": w_goal_url, + "weekly_learnings_html": w_learn_url, + "overall_progress": week_data[1], + "issue_url":val['html_issue_url'], + "pr_details":None, + "weekly_goals":week_data[0], + "weekly_learns":week_data_formatter(plain_text_wurl)[0] + } + + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).execute() + transformed = {"pr_details": []} + if pr_Data.data: + for pr in pr_Data.data: + transformed["pr_details"].append({ + "id": pr.get("pr_id", ""), + "name": pr.get("meta_data", ""), + "week": determine_week(pr['created_at']), + "link": pr.get("html_url", ""), + "status": pr.get("status", ""), + }) + + res['pr_details'] = transformed['pr_details'] + + # Adding each week as a separate key + # for week in weekby_avgs: + # res.update(week) + + # final_data.append(res) + + return jsonify(res),200 + + except Exception as e: + error_traceback = traceback.format_exc() + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 diff --git a/v2_utils.py b/v2_utils.py new file mode 100644 index 0000000..8f60776 --- /dev/null +++ b/v2_utils.py @@ -0,0 +1,84 @@ +import logging,re,markdown2 + +# Func to create name and link for all mentors and contributors +def define_mentors_data(mentors): + try: + res = [] + + if type(mentors) == list: + for ment in mentors: + val = {} + val['name'] = ment + val['link'] = "https://github.com/" + ment + res.append(val) + if type(mentors) == str: + val = {} + val['name'] = mentors + val['link'] = "https://github.com/" + mentors + res.append(val) + + return res + + except Exception as e: + logging.info(f"{e}---define_mentors") + return [] + + + +def week_data_formatter(html_content): + try: + # Find all weeks + week_matches = re.findall(r'

<h2>(Week \d+)</h2>', html_content)
+        tasks_per_week = re.findall(r'<h2>Week \d+</h2>\s*<ul>(.*?)</ul>
', html_content, re.DOTALL) + + weekly_updates = [] + total_weighted_progress = 0 + total_tasks = 0 + + for i, week in enumerate(week_matches): + try: + task_list_html = tasks_per_week[i] + except Exception as e: + task_list_html = "" + + tasks = re.findall(r'\[(x| )\] (.*?)', task_list_html, re.DOTALL) + + total_tasks = len(tasks) + completed_tasks = sum(1 for task in tasks if task[0] == 'x') + task_list = [{"content":i[1],"checked":True if i[0]=='x' else False} for i in tasks] + + + avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0 + + weekly_updates.append({ + 'week': i+1, + # 'total_tasks': total_tasks, + # 'completed_tasks': completed_tasks, + 'progress': avg, + 'tasks':task_list + }) + + num_tasks = len(task_list) + progress = avg + + total_weighted_progress += progress * num_tasks + total_tasks += num_tasks + + response = { + 'number_of_weeks': len(week_matches), + 'weekly_updates': weekly_updates + } + + #FIND OVERALL PROGRESS + + overall_progress = (total_weighted_progress / total_tasks) if total_tasks > 0 else 0 + # return round(overall_progress, 2) + + return weekly_updates,overall_progress + + + except Exception as e: + return [],0 + + + \ No newline at end of file From d1b9bde9feb6a1c20c58fb303d94aa0f5c4aba32 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 12 Jun 2024 19:54:14 +0530 Subject: [PATCH 54/76] import issue fix --- app.py | 10 ---------- utils.py | 15 +++++++++++++++ v2_app.py | 2 +- 3 files changed, 16 insertions(+), 11 deletions(-) diff --git a/app.py b/app.py index e5b785a..27f9267 100644 --- a/app.py +++ b/app.py @@ -5,7 +5,6 @@ import re,os,traceback from utils import * from flask_cors import CORS,cross_origin -from functools import wraps from v2_app import v2 @@ -46,15 +45,6 @@ def greeting(): -# Custom decorator to validate secret key -def require_secret_key(f): - @wraps(f) - def decorated_function(*args, **kwargs): - secret_key = request.headers.get('X-Secret-Key') - if secret_key != SECRET_KEY: - return jsonify({'message': 'Unauthorized access'}), 401 - return f(*args, **kwargs) - return decorated_function @app.route('/get-data', methods=['GET']) @cross_origin(supports_credentials=True) diff --git a/utils.py b/utils.py index 0a8e085..37c1e54 100644 --- a/utils.py +++ b/utils.py @@ -2,9 +2,13 @@ from collections import defaultdict from datetime import datetime, timedelta from dateutil import parser +from flask import jsonify,request +from functools import wraps GITHUB_TOKEN =os.getenv('GITHUB_TOKEN') +SECRET_KEY =os.getenv('SECRET_KEY') + headers = { "Accept": "application/vnd.github+json", @@ -14,6 +18,17 @@ +# Custom decorator to validate secret key +def require_secret_key(f): + @wraps(f) + def decorated_function(*args, **kwargs): + secret_key = request.headers.get('X-Secret-Key') + if secret_key != SECRET_KEY: + return jsonify({'message': 'Unauthorized access'}), 401 + return f(*args, **kwargs) + return decorated_function + + def find_org_data(url): try: url_parts = url.split("/") diff --git a/v2_app.py b/v2_app.py index d75f996..6ca4c4a 100644 --- a/v2_app.py +++ b/v2_app.py @@ -1,7 +1,7 @@ import traceback,re from flask import Blueprint, jsonify, request import markdown2 -from app import require_secret_key +from utils import require_secret_key from db import SupabaseInterface from utils import determine_week from v2_utils import define_mentors_data, week_data_formatter From 0ed597ed6206b14579096780e55a4924809ccca6 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 13 Jun 2024 17:26:54 +0530 Subject: [PATCH 55/76] overall 
percentage finder added --- v2_utils.py | 102 ++++++++++++++++++++++++++++++++-------------------- 1 file changed, 64 insertions(+), 38 deletions(-) diff --git a/v2_utils.py b/v2_utils.py index 8f60776..efe164d 100644 --- a/v2_utils.py +++ b/v2_utils.py @@ -25,60 +25,86 @@ def define_mentors_data(mentors): -def week_data_formatter(html_content): +def week_data_formatter(html_content,type): try: # Find all weeks week_matches = re.findall(r'

<h2>(Week \d+)</h2>', html_content)
         tasks_per_week = re.findall(r'<h2>Week \d+</h2>\s*<ul>(.*?)</ul>
', html_content, re.DOTALL) weekly_updates = [] - total_weighted_progress = 0 total_tasks = 0 - for i, week in enumerate(week_matches): - try: - task_list_html = tasks_per_week[i] - except Exception as e: - task_list_html = "" - - tasks = re.findall(r'\[(x| )\] (.*?)', task_list_html, re.DOTALL) - - total_tasks = len(tasks) - completed_tasks = sum(1 for task in tasks if task[0] == 'x') - task_list = [{"content":i[1],"checked":True if i[0]=='x' else False} for i in tasks] + if type == "Learnings": + for i, week in enumerate(week_matches): + + try: + task_list_html = tasks_per_week[i] + except Exception as e: + task_list_html = "" + + weekly_updates.append({ + 'week': i+1, + 'content':task_list_html + }) + return weekly_updates + + else: + for i, week in enumerate(week_matches): + try: + task_list_html = tasks_per_week[i] + except Exception as e: + task_list_html = "" + + tasks = re.findall(r'\[(x| )\] (.*?)', task_list_html, re.DOTALL) + + total_tasks = len(tasks) + completed_tasks = sum(1 for task in tasks if task[0] == 'x') + task_list = [{"content":i[1],"checked":True if i[0]=='x' else False} for i in tasks] + + + avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0 + + weekly_updates.append({ + 'week': i+1, + # 'total_tasks': total_tasks, + # 'completed_tasks': completed_tasks, + 'progress': avg, + 'tasks':task_list + }) + - avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0 - - weekly_updates.append({ - 'week': i+1, - # 'total_tasks': total_tasks, - # 'completed_tasks': completed_tasks, - 'progress': avg, - 'tasks':task_list - }) + + response = { + 'number_of_weeks': len(week_matches), + 'weekly_updates': weekly_updates + } - num_tasks = len(task_list) - progress = avg + #FIND OVERALL PROGRESS - total_weighted_progress += progress * num_tasks - total_tasks += num_tasks + - response = { - 'number_of_weeks': len(week_matches), - 'weekly_updates': weekly_updates - } - - #FIND OVERALL PROGRESS - - overall_progress = (total_weighted_progress / total_tasks) if total_tasks > 0 else 0 - # return round(overall_progress, 2) - - return weekly_updates,overall_progress + return weekly_updates except Exception as e: - return [],0 + return [] + + +def calculate_overall_progress(weekly_updates, total_weeks): + try: + # Calculate total progress for the provided weeks + provided_weeks = len(weekly_updates) + total_progress = sum(week['progress'] for week in weekly_updates) + + # Calculate average progress based on provided weeks + average_progress = total_progress / provided_weeks if provided_weeks else 0 + # Calculate overall progress for the total number of weeks + overall_progress = average_progress * (total_weeks / provided_weeks) if provided_weeks else 0 + + return round(overall_progress, 2) + except Exception as e: + return 0 \ No newline at end of file From 964fe5b91189b7ac2fc4a5a03c08fe8233baaf88 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 13 Jun 2024 17:28:59 +0530 Subject: [PATCH 56/76] pr data by issue num added --- v2_app.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/v2_app.py b/v2_app.py index 6ca4c4a..46b514c 100644 --- a/v2_app.py +++ b/v2_app.py @@ -4,7 +4,7 @@ from utils import require_secret_key from db import SupabaseInterface from utils import determine_week -from v2_utils import define_mentors_data, week_data_formatter +from v2_utils import calculate_overall_progress, define_mentors_data, week_data_formatter v2 = Blueprint('v2', __name__) @@ -49,7 +49,7 @@ def get_issues_by_owner_id_v2(owner, 
issue): cont_details = SUPABASE_DB.client.table('dmp_issues').select('*').eq('repo_url',val['dmp_issue_url']).execute().data - week_data = week_data_formatter(plain_text_body) + week_data = week_data_formatter(plain_text_body,"Goals") res = { "name": owner, "description": val['description'], @@ -60,14 +60,14 @@ def get_issues_by_owner_id_v2(owner, issue): "org": define_mentors_data(val['owner'])[0] if val['owner'] else [], "weekly_goals_html": w_goal_url, "weekly_learnings_html": w_learn_url, - "overall_progress": week_data[1], + "overall_progress":calculate_overall_progress(week_data,12), "issue_url":val['html_issue_url'], "pr_details":None, - "weekly_goals":week_data[0], - "weekly_learns":week_data_formatter(plain_text_wurl)[0] + "weekly_goals":week_data, + "weekly_learns":week_data_formatter(plain_text_wurl,"Learnings") } - pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).execute() + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).eq('pr_number',issue).execute() transformed = {"pr_details": []} if pr_Data.data: for pr in pr_Data.data: From 4156ab5062209c35ef42da4b7b66c02b94b47ea2 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 13 Jun 2024 18:23:49 +0530 Subject: [PATCH 57/76] cont_id removed --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index 46b514c..80277cc 100644 --- a/v2_app.py +++ b/v2_app.py @@ -56,7 +56,7 @@ def get_issues_by_owner_id_v2(owner, issue): "mentor": define_mentors_data(val['mentor_name']), "mentor_id": val['mentor_id'] , "contributor":define_mentors_data(cont_details[0]['contributor_name']), - "contributor_id": cont_details[0]['contributor_id'], + # "contributor_id": cont_details[0]['contributor_id'], "org": define_mentors_data(val['owner'])[0] if val['owner'] else [], "weekly_goals_html": w_goal_url, "weekly_learnings_html": w_learn_url, From b4cc8ffdd175052caea87b51053bc9c0f0863535 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 13 Jun 2024 18:34:31 +0530 Subject: [PATCH 58/76] issue fix --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index 80277cc..0e67178 100644 --- a/v2_app.py +++ b/v2_app.py @@ -20,7 +20,7 @@ def get_issues_by_owner_id_v2(owner, issue): data = response.data final_data = [] - w_learn_url,w_goal_url,avg,cont_details = None,None,None,None + w_learn_url,w_goal_url,avg,cont_details,plain_text_body,plain_text_wurl = None,None,None,None,None,None for val in data: issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) From 14c83313ac62637a65795047cc5612daa7824c05 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 13 Jun 2024 20:55:56 +0530 Subject: [PATCH 59/76] issue fix --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index 0e67178..288fdaa 100644 --- a/v2_app.py +++ b/v2_app.py @@ -67,7 +67,7 @@ def get_issues_by_owner_id_v2(owner, issue): "weekly_learns":week_data_formatter(plain_text_wurl,"Learnings") } - pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).eq('pr_number',issue).execute() + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).eq('issue_number_title',issue).execute() transformed = {"pr_details": []} if pr_Data.data: for pr in pr_Data.data: From 9e4a82ac1dd7d7957e9fd7480a7827fb2b7cb9a8 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 13 Jun 2024 21:20:25 +0530 Subject: [PATCH 60/76] 
overall prog changes --- v2_utils.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/v2_utils.py b/v2_utils.py index efe164d..310df37 100644 --- a/v2_utils.py +++ b/v2_utils.py @@ -91,20 +91,24 @@ def week_data_formatter(html_content,type): return [] -def calculate_overall_progress(weekly_updates, total_weeks): +def calculate_overall_progress(weekly_updates, default_weeks=12): try: - # Calculate total progress for the provided weeks + total_progress = 0 provided_weeks = len(weekly_updates) - total_progress = sum(week['progress'] for week in weekly_updates) - # Calculate average progress based on provided weeks - average_progress = total_progress / provided_weeks if provided_weeks else 0 + # Sum the progress of each provided week + for week in weekly_updates: + total_progress += week['progress'] - # Calculate overall progress for the total number of weeks - overall_progress = average_progress * (total_weeks / provided_weeks) if provided_weeks else 0 + # Add zero progress for the remaining weeks to reach the default weeks + total_weeks = default_weeks + remaining_weeks = default_weeks - provided_weeks + total_progress += remaining_weeks * 0 # Adding zero progress for the remaining weeks - return round(overall_progress, 2) + # Calculate the average progress over the total number of weeks + overall_progress = total_progress / total_weeks if total_weeks > 0 else 0 + + return round(overall_progress, 2) except Exception as e: return 0 - \ No newline at end of file From 2e3de4296696f42e7b4faee412f027608d74d937 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 14 Jun 2024 15:05:23 +0530 Subject: [PATCH 61/76] status code changes --- app.py | 14 +++++++------- v2_app.py | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/app.py b/app.py index 27f9267..61a5850 100644 --- a/app.py +++ b/app.py @@ -73,7 +73,7 @@ def get_data(): data = response.data return jsonify(data) except Exception as e: - return jsonify({'error': str(e)}), 500 + return jsonify({'error': str(e)}), 200 @@ -99,7 +99,7 @@ def v1get_issues(): except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + return jsonify({'error': str(e), 'traceback': error_traceback}), 200 @app.route('/issues', methods=['GET']) @@ -168,7 +168,7 @@ def get_issues(): except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + return jsonify({'error': str(e), 'traceback': error_traceback}), 200 @app.route('/issues/', methods=['GET']) @cross_origin(supports_credentials=True) @@ -201,13 +201,13 @@ def get_issues_by_owner(owner): try: response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).order('comment_updated_at', desc=True).execute() if not response.data: - return jsonify({'error': "No data found"}), 500 + return jsonify({'error': "No data found"}), 200 data = response.data[0] return jsonify({"name": data['org_name'], "description": data['org_description']}) except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + return jsonify({'error': str(e), 'traceback': error_traceback}), 200 @@ -250,7 +250,7 @@ def get_issues_by_owner_id(owner, issue): SUPABASE_DB = SupabaseInterface().get_instance() response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() if 
not response.data: - return jsonify({'error': "No data found"}), 500 + return jsonify({'error': "No data found"}), 200 data = response.data final_data = [] @@ -320,7 +320,7 @@ def get_issues_by_owner_id(owner, issue): except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + return jsonify({'error': str(e), 'traceback': error_traceback}), 200 diff --git a/v2_app.py b/v2_app.py index 288fdaa..9aa582d 100644 --- a/v2_app.py +++ b/v2_app.py @@ -16,7 +16,7 @@ def get_issues_by_owner_id_v2(owner, issue): SUPABASE_DB = SupabaseInterface().get_instance() response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() if not response.data: - return jsonify({'error': "No data found"}), 500 + return jsonify({'error': "No data found"}), 200 data = response.data final_data = [] @@ -91,4 +91,4 @@ def get_issues_by_owner_id_v2(owner, issue): except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + return jsonify({'error': str(e), 'traceback': error_traceback}), 200 From f5cfe73a0f026cb0b87c0e54d1aad8bdcc3c6def Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 14 Jun 2024 20:32:10 +0530 Subject: [PATCH 62/76] find org from org_name --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 61a5850..c5d1b45 100644 --- a/app.py +++ b/app.py @@ -199,7 +199,7 @@ def get_issues_by_owner(owner): type: string """ try: - response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).order('comment_updated_at', desc=True).execute() + response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('org_name', owner).order('comment_updated_at', desc=True).execute() if not response.data: return jsonify({'error': "No data found"}), 200 data = response.data[0] From 2fea2861e7c1e88f450ecfe1992fa49a4cd78883 Mon Sep 17 00:00:00 2001 From: Vedant Khairnar Date: Mon, 17 Jun 2024 11:13:44 +0530 Subject: [PATCH 63/76] [Fix] as per the new DB structure --- .gitignore | 2 +- app.py | 91 ++++++++++++++++++++++++++++++----------------------- v2_app.py | 53 ++++++++++++++++--------------- v2_utils.py | 90 +++++++++++++++++++++------------------------------- 4 files changed, 114 insertions(+), 122 deletions(-) diff --git a/.gitignore b/.gitignore index bf3ff3d..b2d0cb0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ dmp_2/__pycache__/* .env env/* - +venv __pycache__/* diff --git a/app.py b/app.py index c5d1b45..c7f0d62 100644 --- a/app.py +++ b/app.py @@ -127,88 +127,99 @@ def get_issues(): type: string """ try: - dmp_issue =SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data + # Fetch all issues with their details + dmp_issues = SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data - updated_issues = [] - - for i in dmp_issue: - val = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('dmp_issue_url',i['repo_url']).execute().data - if val!=[]: - i['issues'] = val[0] #append first obj ie all are reder same issue - i['org_id'] = val[0]['org_id'] - i['org_name'] = val[0]['org_name'] - - updated_issues.append(i) - - # Create a defaultdict of lists + # Create a defaultdict of lists to group issues by 'org_id' grouped_data = defaultdict(list) - # Group data by 'org_name' - for item in 
updated_issues: - grouped_data[item['org_name']].append(item) + for issue in dmp_issues: + # Fetch organization details for the issue + org_details = SupabaseInterface().get_instance().client.table('dmp_orgs').select('*').eq('id', issue['org_id']).execute().data + if org_details: + issue['org_name'] = org_details[0]['name'] + + grouped_data[issue['org_id']].append(issue) + # Prepare response in the required format response = [] - for org_name, items in grouped_data.items(): + for org_id, items in grouped_data.items(): issues = [ { - "html_url": item['issues']['html_issue_url'], - "id": item['issues']['comment_id'], - "issue_number": item['issues']['issue_number'], - "name": item['issues']['title'] + "id": item['issue_number'], + "name": item['title'] } for item in items ] response.append({ - "issues": issues, - "org_id": items[0]['org_id'], - "org_name": org_name + "org_id": org_id, + "org_name": items[0]['org_name'], # Assuming all items in the group have the same org_name + "issues": issues }) - return jsonify(response) + return jsonify({"issues": response}) except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 200 - + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 + @app.route('/issues/', methods=['GET']) @cross_origin(supports_credentials=True) @require_secret_key def get_issues_by_owner(owner): """ - Fetch issues by owner. + Fetch organization details by owner's GitHub URL. --- parameters: - name: owner in: path type: string required: true - description: The owner of the issues + description: The owner of the GitHub URL (e.g., organization owner) responses: 200: - description: Issues fetched successfully + description: Organization details fetched successfully schema: - type: array - items: - type: object + type: object + properties: + name: + type: string + description: Name of the organization + description: + type: string + description: Description of the organization + 404: + description: Organization not found + schema: + type: object + properties: + error: + type: string + description: Error message 500: - description: Error fetching issues + description: Error fetching organization details schema: type: object properties: error: type: string + description: Error message """ try: - response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('org_name', owner).order('comment_updated_at', desc=True).execute() + # Construct the GitHub URL based on the owner parameter + org_link = f"https://github.com/{owner}" + + # Fetch organization details from dmp_orgs table + response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('name', 'description').eq('link', org_link).execute() + if not response.data: - return jsonify({'error': "No data found"}), 200 - data = response.data[0] - return jsonify({"name": data['org_name'], "description": data['org_description']}) + return jsonify({'error': "Organization not found"}), 404 + + return jsonify(response.data) except Exception as e: error_traceback = traceback.format_exc() - return jsonify({'error': str(e), 'traceback': error_traceback}), 200 - + return jsonify({'error': str(e), 'traceback': error_traceback}), 500 @app.route('/issues//', methods=['GET']) diff --git a/v2_app.py b/v2_app.py index 9aa582d..a731c1e 100644 --- a/v2_app.py +++ b/v2_app.py @@ -4,7 +4,7 @@ from utils import require_secret_key from db import SupabaseInterface from utils import determine_week -from v2_utils import 
calculate_overall_progress, define_mentors_data, week_data_formatter +from v2_utils import calculate_overall_progress, define_link_data, week_data_formatter v2 = Blueprint('v2', __name__) @@ -12,18 +12,28 @@ @v2.route('/issues//', methods=['GET']) @require_secret_key def get_issues_by_owner_id_v2(owner, issue): - try: + try: SUPABASE_DB = SupabaseInterface().get_instance() - response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute() + # Fetch issue updates based on owner and issue number + + url = f"https://github.com/{owner}" + dmp_issue_id = SUPABASE_DB.client.table('dmp_issues').select('*').like('issue_url', f'%{url}%').eq('issue_number', issue).execute() + if not dmp_issue_id.data: + return jsonify({'error': "No data found"}), 500 + + dmp_issue_id = dmp_issue_id.data[0] + response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('dmp_id', dmp_issue_id['id']).execute() + if not response.data: - return jsonify({'error': "No data found"}), 200 + return jsonify({'error': "No data found"}), 500 + data = response.data final_data = [] w_learn_url,w_goal_url,avg,cont_details,plain_text_body,plain_text_wurl = None,None,None,None,None,None - + for val in data: - issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) + # issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} @@ -31,11 +41,9 @@ def get_issues_by_owner_id_v2(owner, issue): if "Weekly Goals" in val['body_text'] and not w_goal_url: w_goal_url = val['body_text'] plain_text_body = markdown2.markdown(val['body_text']) - tasks = re.findall(r'\[(x| )\]', plain_text_body) total_tasks = len(tasks) completed_tasks = tasks.count('x') - avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 if "Weekly Learnings" in val['body_text'] and not w_learn_url: @@ -46,28 +54,27 @@ def get_issues_by_owner_id_v2(owner, issue): # mentors = mentors_data['mentors'] # ment_usernames = mentors_data['mentor_usernames'] if not cont_details: - cont_details = SUPABASE_DB.client.table('dmp_issues').select('*').eq('repo_url',val['dmp_issue_url']).execute().data - - + cont_details = dmp_issue_id['contributor_username'] week_data = week_data_formatter(plain_text_body,"Goals") + res = { "name": owner, - "description": val['description'], - "mentor": define_mentors_data(val['mentor_name']), - "mentor_id": val['mentor_id'] , - "contributor":define_mentors_data(cont_details[0]['contributor_name']), + "description": dmp_issue_id['description'], + "mentor": define_link_data(dmp_issue_id['mentor_username']), + "mentor_id": dmp_issue_id['mentor_username'] , + "contributor":define_link_data(cont_details), # "contributor_id": cont_details[0]['contributor_id'], - "org": define_mentors_data(val['owner'])[0] if val['owner'] else [], + "org": define_link_data(dmp_issue_id['mentor_username'])[0] if dmp_issue_id['mentor_username'] else [], "weekly_goals_html": w_goal_url, "weekly_learnings_html": w_learn_url, "overall_progress":calculate_overall_progress(week_data,12), - "issue_url":val['html_issue_url'], + "issue_url":dmp_issue_id['issue_url'], "pr_details":None, "weekly_goals":week_data, - "weekly_learns":week_data_formatter(plain_text_wurl,"Learnings") + 
"weekly_learnings":week_data_formatter(plain_text_wurl,"Learnings") } - pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).eq('issue_number_title',issue).execute() + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).eq('title',issue).execute() transformed = {"pr_details": []} if pr_Data.data: for pr in pr_Data.data: @@ -80,13 +87,7 @@ def get_issues_by_owner_id_v2(owner, issue): }) res['pr_details'] = transformed['pr_details'] - - # Adding each week as a separate key - # for week in weekby_avgs: - # res.update(week) - - # final_data.append(res) - + return jsonify(res),200 except Exception as e: diff --git a/v2_utils.py b/v2_utils.py index 310df37..5c9d55c 100644 --- a/v2_utils.py +++ b/v2_utils.py @@ -1,93 +1,72 @@ import logging,re,markdown2 # Func to create name and link for all mentors and contributors -def define_mentors_data(mentors): +def define_link_data(usernames): try: res = [] - - if type(mentors) == list: - for ment in mentors: + if type(usernames) == list: + for username in usernames: val = {} - val['name'] = ment - val['link'] = "https://github.com/" + ment + val['name'] = username + val['link'] = "https://github.com/" + username res.append(val) - if type(mentors) == str: + if type(usernames) == str: + if usernames[0]=="@": + usernames = usernames[1:] val = {} - val['name'] = mentors - val['link'] = "https://github.com/" + mentors + val['name'] = usernames + val['link'] = "https://github.com/" + usernames res.append(val) return res except Exception as e: - logging.info(f"{e}---define_mentors") + logging.info(f"{e}---define_link_data") return [] - -def week_data_formatter(html_content,type): + +def week_data_formatter(html_content, type): + try: - # Find all weeks - week_matches = re.findall(r'

<h2>(Week \d+)</h2>', html_content)
-        tasks_per_week = re.findall(r'<h2>Week \d+</h2>\s*<ul>(.*?)</ul>
', html_content, re.DOTALL) - + # Use regex to find week titles (e.g., Week 1, Week 2) and their corresponding task lists + week_matches = re.findall(r'(Week \d+)', html_content) + tasks_per_week = re.split(r'Week \d+', html_content)[1:] # Split the content by weeks and skip the first empty split + weekly_updates = [] - total_tasks = 0 - + if type == "Learnings": for i, week in enumerate(week_matches): - - try: - task_list_html = tasks_per_week[i] - except Exception as e: - task_list_html = "" - + task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else "" weekly_updates.append({ - 'week': i+1, - 'content':task_list_html + 'week': i + 1, + 'content': task_list_html.strip() }) - return weekly_updates - - else: + + else: for i, week in enumerate(week_matches): - try: - task_list_html = tasks_per_week[i] - except Exception as e: - task_list_html = "" + task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else "" - tasks = re.findall(r'\[(x| )\] (.*?)', task_list_html, re.DOTALL) + # Adjust regex to capture tasks regardless of the tags around them + tasks = re.findall(r'\[(x|X| )\]\s*(.*?)', task_list_html, re.DOTALL) total_tasks = len(tasks) - completed_tasks = sum(1 for task in tasks if task[0] == 'x') - task_list = [{"content":i[1],"checked":True if i[0]=='x' else False} for i in tasks] - + completed_tasks = sum(1 for task in tasks if task[0] in ['x', 'X']) + task_list = [{"content": task[1].strip(), "checked": task[0] in ['x', 'X']} for task in tasks] avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0 weekly_updates.append({ - 'week': i+1, - # 'total_tasks': total_tasks, - # 'completed_tasks': completed_tasks, + 'week': i + 1, 'progress': avg, - 'tasks':task_list + 'tasks': task_list }) - - - - response = { - 'number_of_weeks': len(week_matches), - 'weekly_updates': weekly_updates - } - - #FIND OVERALL PROGRESS - - return weekly_updates - - + except Exception as e: + print(f"Error: {e}") return [] @@ -98,7 +77,7 @@ def calculate_overall_progress(weekly_updates, default_weeks=12): # Sum the progress of each provided week for week in weekly_updates: - total_progress += week['progress'] + total_progress += week.get('progress', 0) # Add zero progress for the remaining weeks to reach the default weeks total_weeks = default_weeks @@ -110,5 +89,6 @@ def calculate_overall_progress(weekly_updates, default_weeks=12): return round(overall_progress, 2) except Exception as e: + print(f"Error: {e}") return 0 \ No newline at end of file From 3250a5e0b9b2dd498fafa9411c95cc6dd36f4e26 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 17 Jun 2024 11:54:13 +0530 Subject: [PATCH 64/76] 3rd api fix --- v2_app.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/v2_app.py b/v2_app.py index a731c1e..a6cbd11 100644 --- a/v2_app.py +++ b/v2_app.py @@ -32,21 +32,22 @@ def get_issues_by_owner_id_v2(owner, issue): final_data = [] w_learn_url,w_goal_url,avg,cont_details,plain_text_body,plain_text_wurl = None,None,None,None,None,None + for val in data: # issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo']) # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} if val['body_text']: - if "Weekly Goals" in val['body_text'] and not w_goal_url: + if ("Weekly Goals" in val['body_text'] and not w_goal_url) and ("@"+val['created_by'].lower() == 
dmp_issue_id['mentor_username'].lower()): w_goal_url = val['body_text'] plain_text_body = markdown2.markdown(val['body_text']) tasks = re.findall(r'\[(x| )\]', plain_text_body) total_tasks = len(tasks) completed_tasks = tasks.count('x') avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 - - if "Weekly Learnings" in val['body_text'] and not w_learn_url: + + if ("Weekly Learnings" in val['body_text'] and not w_learn_url) and ((val['created_by'] == dmp_issue_id['contributor_username'])): w_learn_url = val['body_text'] plain_text_wurl = markdown2.markdown(val['body_text']) @@ -74,15 +75,16 @@ def get_issues_by_owner_id_v2(owner, issue): "weekly_learnings":week_data_formatter(plain_text_wurl,"Learnings") } - pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).eq('title',issue).execute() + + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).like('title', f'%#{issue} - %').execute() transformed = {"pr_details": []} if pr_Data.data: for pr in pr_Data.data: transformed["pr_details"].append({ "id": pr.get("pr_id", ""), - "name": pr.get("meta_data", ""), + "name": pr.get("title", ""), "week": determine_week(pr['created_at']), - "link": pr.get("html_url", ""), + "link": pr.get("link", ""), "status": pr.get("status", ""), }) From cf47d0118a654e1e662a00793156a10cc3b6d570 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 17 Jun 2024 17:48:53 +0530 Subject: [PATCH 65/76] key changes --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index a6cbd11..99b1a88 100644 --- a/v2_app.py +++ b/v2_app.py @@ -59,7 +59,7 @@ def get_issues_by_owner_id_v2(owner, issue): week_data = week_data_formatter(plain_text_body,"Goals") res = { - "name": owner, + "name": dmp_issue_id['title'], "description": dmp_issue_id['description'], "mentor": define_link_data(dmp_issue_id['mentor_username']), "mentor_id": dmp_issue_id['mentor_username'] , From 34658d50a1f48b58d7664d15eaacd5f5876c52e4 Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 18 Jun 2024 13:41:45 +0530 Subject: [PATCH 66/76] v2 api changes in week leanrings html --- v2_app.py | 1 - v2_utils.py | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index 99b1a88..95fbe8f 100644 --- a/v2_app.py +++ b/v2_app.py @@ -57,7 +57,6 @@ def get_issues_by_owner_id_v2(owner, issue): if not cont_details: cont_details = dmp_issue_id['contributor_username'] week_data = week_data_formatter(plain_text_body,"Goals") - res = { "name": dmp_issue_id['title'], "description": dmp_issue_id['description'], diff --git a/v2_utils.py b/v2_utils.py index 5c9d55c..8cd48b0 100644 --- a/v2_utils.py +++ b/v2_utils.py @@ -36,6 +36,9 @@ def week_data_formatter(html_content, type): weekly_updates = [] if type == "Learnings": + # tasks_per_week = re.split(r'

<h2>Week \d+</h2>
', html_content)[1:] + tasks_per_week = re.split(r'(<.*?>Week \d+<.*?>)', html_content)[1:] + tasks_per_week = [tasks_per_week[i] for i in range(1, len(tasks_per_week), 2)] for i, week in enumerate(week_matches): task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else "" weekly_updates.append({ From 5a2fab01cf1f235dcf43d6d487447731416a8e7e Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 18 Jun 2024 18:29:21 +0530 Subject: [PATCH 67/76] condition removed in find week datas --- v2_app.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/v2_app.py b/v2_app.py index 95fbe8f..ed0667f 100644 --- a/v2_app.py +++ b/v2_app.py @@ -39,7 +39,8 @@ def get_issues_by_owner_id_v2(owner, issue): # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} if val['body_text']: - if ("Weekly Goals" in val['body_text'] and not w_goal_url) and ("@"+val['created_by'].lower() == dmp_issue_id['mentor_username'].lower()): + # and ("@"+val['created_by'].lower() == dmp_issue_id['mentor_username'].lower()) + if ("Weekly Goals" in val['body_text'] and not w_goal_url): w_goal_url = val['body_text'] plain_text_body = markdown2.markdown(val['body_text']) tasks = re.findall(r'\[(x| )\]', plain_text_body) @@ -47,7 +48,7 @@ def get_issues_by_owner_id_v2(owner, issue): completed_tasks = tasks.count('x') avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0 - if ("Weekly Learnings" in val['body_text'] and not w_learn_url) and ((val['created_by'] == dmp_issue_id['contributor_username'])): + if ("Weekly Learnings" in val['body_text'] and not w_learn_url): w_learn_url = val['body_text'] plain_text_wurl = markdown2.markdown(val['body_text']) From 21c2c06279680cb24f34c941fe0ea657d17f9f06 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 21 Jun 2024 13:47:52 +0530 Subject: [PATCH 68/76] issue api performance fix --- app.py | 46 +++++++++++++++------------------------------- 1 file changed, 15 insertions(+), 31 deletions(-) diff --git a/app.py b/app.py index c7f0d62..8d0bdce 100644 --- a/app.py +++ b/app.py @@ -127,37 +127,21 @@ def get_issues(): type: string """ try: - # Fetch all issues with their details - dmp_issues = SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data - - # Create a defaultdict of lists to group issues by 'org_id' - grouped_data = defaultdict(list) - for issue in dmp_issues: - # Fetch organization details for the issue - org_details = SupabaseInterface().get_instance().client.table('dmp_orgs').select('*').eq('id', issue['org_id']).execute().data - if org_details: - issue['org_name'] = org_details[0]['name'] - - grouped_data[issue['org_id']].append(issue) - - # Prepare response in the required format - response = [] - for org_id, items in grouped_data.items(): - issues = [ - { - "id": item['issue_number'], - "name": item['title'] - } - for item in items - ] - - response.append({ - "org_id": org_id, - "org_name": items[0]['org_name'], # Assuming all items in the group have the same org_name - "issues": issues - }) - - return jsonify({"issues": response}) + # Fetch all issues with their details + response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('*, dmp_issues(*)').execute() + res = [] + + for org in response.data: + obj = {} + issues = org['dmp_issues'] + obj['org_id'] = org['id'] + obj['org_name'] = org['name'] + renamed_issues = [{"id": issue["issue_number"], "name": issue["title"]} for issue in issues] + obj['issues'] = renamed_issues + + 
res.append(obj) + + return jsonify({"issues": res}) except Exception as e: error_traceback = traceback.format_exc() From 790a6adf61febb08eb00a118ec2eab76aed3fd27 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 21 Jun 2024 13:55:11 +0530 Subject: [PATCH 69/76] issue by owner api fix --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 8d0bdce..9011ec6 100644 --- a/app.py +++ b/app.py @@ -194,7 +194,7 @@ def get_issues_by_owner(owner): org_link = f"https://github.com/{owner}" # Fetch organization details from dmp_orgs table - response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('name', 'description').eq('link', org_link).execute() + response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('name', 'description').eq('name', owner).execute() if not response.data: return jsonify({'error': "Organization not found"}), 404 From 7a0b845c83203b7d94a1b8e4606389e416c5b3e9 Mon Sep 17 00:00:00 2001 From: sasi Date: Mon, 24 Jun 2024 15:49:20 +0530 Subject: [PATCH 70/76] api change - get detail by repo owner --- v2_app.py | 10 +++++++++- v2_utils.py | 3 +++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index ed0667f..c9a4025 100644 --- a/v2_app.py +++ b/v2_app.py @@ -15,8 +15,16 @@ def get_issues_by_owner_id_v2(owner, issue): try: SUPABASE_DB = SupabaseInterface().get_instance() # Fetch issue updates based on owner and issue number + + url = f"https://github.com/{owner}" + + # import pdb;pdb.set_trace() + actual_owner = SUPABASE_DB.client.table('dmp_orgs').select('id','name','repo_owner').like('name',owner).execute().data + repo_owner =actual_owner[0]['repo_owner'] if actual_owner else "" + #create url with repo owner + url = f"https://github.com/{repo_owner}" if repo_owner else None + - url = f"https://github.com/{owner}" dmp_issue_id = SUPABASE_DB.client.table('dmp_issues').select('*').like('issue_url', f'%{url}%').eq('issue_number', issue).execute() if not dmp_issue_id.data: return jsonify({'error': "No data found"}), 500 diff --git a/v2_utils.py b/v2_utils.py index 8cd48b0..5697aa6 100644 --- a/v2_utils.py +++ b/v2_utils.py @@ -7,9 +7,12 @@ def define_link_data(usernames): if type(usernames) == list: for username in usernames: val = {} + if username[0]=="@": + username = username[1:] val['name'] = username val['link'] = "https://github.com/" + username res.append(val) + if type(usernames) == str: if usernames[0]=="@": usernames = usernames[1:] From 33e204c5d7aa41b244f87cee097244fc35b1acd0 Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 25 Jun 2024 11:05:51 +0530 Subject: [PATCH 71/76] org details fix --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index c9a4025..636bfd9 100644 --- a/v2_app.py +++ b/v2_app.py @@ -73,7 +73,7 @@ def get_issues_by_owner_id_v2(owner, issue): "mentor_id": dmp_issue_id['mentor_username'] , "contributor":define_link_data(cont_details), # "contributor_id": cont_details[0]['contributor_id'], - "org": define_link_data(dmp_issue_id['mentor_username'])[0] if dmp_issue_id['mentor_username'] else [], + "org": define_link_data(repo_owner), "weekly_goals_html": w_goal_url, "weekly_learnings_html": w_learn_url, "overall_progress":calculate_overall_progress(week_data,12), From 6687de8a3ad867865718521071ce623a516fca1f Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 25 Jun 2024 12:32:10 +0530 Subject: [PATCH 72/76] weekly goals from mentors --- v2_app.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) 
diff --git a/v2_app.py b/v2_app.py index 636bfd9..b8c16c7 100644 --- a/v2_app.py +++ b/v2_app.py @@ -46,9 +46,8 @@ def get_issues_by_owner_id_v2(owner, issue): # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url) # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} - if val['body_text']: - # and ("@"+val['created_by'].lower() == dmp_issue_id['mentor_username'].lower()) - if ("Weekly Goals" in val['body_text'] and not w_goal_url): + if val['body_text']: + if ("Weekly Goals" in val['body_text'] and not w_goal_url) and ("@"+val['created_by'].lower() == dmp_issue_id['mentor_username'].lower() if dmp_issue_id['mentor_username'] else None): w_goal_url = val['body_text'] plain_text_body = markdown2.markdown(val['body_text']) tasks = re.findall(r'\[(x| )\]', plain_text_body) From 2350586295abc0ee0512a3f2b0257cfd46e7133f Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 25 Jun 2024 13:32:39 +0530 Subject: [PATCH 73/76] id issues fix --- app.py | 2 +- v2_app.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index 9011ec6..5f8c60b 100644 --- a/app.py +++ b/app.py @@ -136,7 +136,7 @@ def get_issues(): issues = org['dmp_issues'] obj['org_id'] = org['id'] obj['org_name'] = org['name'] - renamed_issues = [{"id": issue["issue_number"], "name": issue["title"]} for issue in issues] + renamed_issues = [{"id": issue["id"], "name": issue["title"]} for issue in issues] obj['issues'] = renamed_issues res.append(obj) diff --git a/v2_app.py b/v2_app.py index b8c16c7..267cf13 100644 --- a/v2_app.py +++ b/v2_app.py @@ -25,7 +25,7 @@ def get_issues_by_owner_id_v2(owner, issue): url = f"https://github.com/{repo_owner}" if repo_owner else None - dmp_issue_id = SUPABASE_DB.client.table('dmp_issues').select('*').like('issue_url', f'%{url}%').eq('issue_number', issue).execute() + dmp_issue_id = SUPABASE_DB.client.table('dmp_issues').select('*').like('issue_url', f'%{url}%').eq('id', issue).execute() if not dmp_issue_id.data: return jsonify({'error': "No data found"}), 500 From 35d950a22d4424092fa133733abb7ad657b7b835 Mon Sep 17 00:00:00 2001 From: sasi Date: Tue, 25 Jun 2024 17:41:52 +0530 Subject: [PATCH 74/76] org name issue fix --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index 267cf13..0571342 100644 --- a/v2_app.py +++ b/v2_app.py @@ -72,7 +72,7 @@ def get_issues_by_owner_id_v2(owner, issue): "mentor_id": dmp_issue_id['mentor_username'] , "contributor":define_link_data(cont_details), # "contributor_id": cont_details[0]['contributor_id'], - "org": define_link_data(repo_owner), + "org": define_link_data(repo_owner)[0] if repo_owner else None, "weekly_goals_html": w_goal_url, "weekly_learnings_html": w_learn_url, "overall_progress":calculate_overall_progress(week_data,12), From ae79633a230478847516d7f61bf783c815065a6a Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 27 Jun 2024 14:26:31 +0530 Subject: [PATCH 75/76] week goals metor check removed --- v2_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v2_app.py b/v2_app.py index 0571342..12c9cb1 100644 --- a/v2_app.py +++ b/v2_app.py @@ -47,7 +47,7 @@ def get_issues_by_owner_id_v2(owner, issue): # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []} if val['body_text']: - if ("Weekly Goals" in val['body_text'] and not w_goal_url) and ("@"+val['created_by'].lower() == 
dmp_issue_id['mentor_username'].lower() if dmp_issue_id['mentor_username'] else None): + if ("Weekly Goals" in val['body_text'] and not w_goal_url): w_goal_url = val['body_text'] plain_text_body = markdown2.markdown(val['body_text']) tasks = re.findall(r'\[(x| )\]', plain_text_body) From 2457de4a56cefedf4610380f75fd3fc7534fc767 Mon Sep 17 00:00:00 2001 From: sasi Date: Thu, 27 Jun 2024 16:46:23 +0530 Subject: [PATCH 76/76] pr filter removed --- v2_app.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/v2_app.py b/v2_app.py index 12c9cb1..6f8ee2a 100644 --- a/v2_app.py +++ b/v2_app.py @@ -82,8 +82,7 @@ def get_issues_by_owner_id_v2(owner, issue): "weekly_learnings":week_data_formatter(plain_text_wurl,"Learnings") } - - pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).like('title', f'%#{issue} - %').execute() + pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).execute() transformed = {"pr_details": []} if pr_Data.data: for pr in pr_Data.data:
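
For reference, here is a minimal, standalone sketch (not part of any patch above) of the week-parsing and progress-averaging flow that patches 63 through 76 converge on in v2_utils.py and v2_app.py: the GitHub comment body is rendered to HTML with markdown2, split on the "Week N" headings, checkbox tasks are tallied per week, and the per-week percentages are averaged over an assumed 12-week plan. The function names and simplified regexes below are illustrative assumptions, not the repository's exact code.

# Illustrative sketch only: simplified, tag-agnostic parsing of weekly checklists.
import re

import markdown2


def parse_weeks(markdown_body):
    """Render comment markdown to HTML and split it into per-week task summaries."""
    html = markdown2.markdown(markdown_body)
    headings = re.findall(r'Week \d+', html)
    # Drop everything before the first "Week N" heading; each remaining chunk
    # holds whatever followed one heading (tag-agnostic, like the v2 split regex).
    chunks = re.split(r'<.*?>Week \d+<.*?>', html)[1:]
    weeks = []
    for i in range(len(headings)):
        chunk = chunks[i] if i < len(chunks) else ""
        # Markdown checklists render as "[x] task" / "[ ] task" items.
        tasks = re.findall(r'\[([xX ])\]\s*([^<\n]*)', chunk)
        done = sum(1 for mark, _ in tasks if mark.lower() == 'x')
        progress = round(done / len(tasks) * 100) if tasks else 0
        weeks.append({
            'week': i + 1,
            'progress': progress,
            'tasks': [{'content': text.strip(), 'checked': mark.lower() == 'x'}
                      for mark, text in tasks],
        })
    return weeks


def overall_progress(weekly_updates, default_weeks=12):
    """Average per-week progress across the assumed 12-week plan length."""
    total = sum(week.get('progress', 0) for week in weekly_updates)
    return round(total / default_weeks, 2) if default_weeks else 0

With two fully completed weeks out of twelve, overall_progress returns 16.67, which should match what calculate_overall_progress in the patches above reports for the same input.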