diff --git a/.github/workflows/runbook-reset-dev-mongo.yaml b/.github/workflows/runbook-reset-dev-mongo.yaml
new file mode 100644
index 000000000..23b47025a
--- /dev/null
+++ b/.github/workflows/runbook-reset-dev-mongo.yaml
@@ -0,0 +1,27 @@
+name: Reset Dev Mongo
+
+on:
+  workflow_dispatch:
+
+jobs:
+  reset-mongo:
+    name: SSH and Reset Dev MongoDB State
+    runs-on: ubuntu-latest
+    steps:
+      - name: SSH and Reset MongoDB
+        uses: appleboy/ssh-action@v1.2.0
+        with:
+          host: ${{ secrets.SSH_HOST }}
+          username: ${{ secrets.SSH_USERNAME }}
+          key: ${{ secrets.SSH_KEY }}
+          script: |
+            set -e # Exit immediately if a command fails
+
+            # Create Mongo job from mongo-reset
+            kubectl create job --from=cronjob/bt-base-reset-dev-mongo bt-base-reset-dev-mongo-ga-manual
+            echo "MongoDB reset scheduled."
+
+            # Wait for job_pod log output
+            job_pod=$(kubectl get pods -o custom-columns=NAME:.metadata.name --no-headers -n bt | grep 'bt-base-reset-dev-mongo-ga-manual')
+            kubectl wait --for=condition=ready pod/$job_pod -n bt --timeout=30s
+            kubectl logs -f $job_pod -n bt
diff --git a/apps/backend/src/bootstrap/loaders/apollo.ts b/apps/backend/src/bootstrap/loaders/apollo.ts
index 1f74a230c..5b3aca5ce 100644
--- a/apps/backend/src/bootstrap/loaders/apollo.ts
+++ b/apps/backend/src/bootstrap/loaders/apollo.ts
@@ -60,7 +60,7 @@ export default async (redis: RedisClientType) => {
       }),
       responseCachePlugin(),
     ],
-    // TODO(production): Disable introspection in production
+    // TODO(prod): introspection: config.isDev,
     introspection: true,
     cache: new RedisCache(redis),
   });
diff --git a/apps/datapuller/src/pullers/main.ts b/apps/datapuller/src/pullers/main.ts
index 9d9692379..c7a33ed9c 100644
--- a/apps/datapuller/src/pullers/main.ts
+++ b/apps/datapuller/src/pullers/main.ts
@@ -4,6 +4,8 @@ import setup from "../shared";
 import { Config } from "../shared/config";
 import updateClasses from "./classes";
 import updateCourses from "./courses";
+import updateEnrollmentHistories from "./enrollment";
+import updateGradeDistributions from "./grade-distributions";
 import updateSections from "./sections";
 
 const testDatabaseWrite = async (config: Config) => {
@@ -51,6 +53,12 @@ const main = async () => {
     config.log.info("\n=== UPDATE CLASSES ===");
     await updateClasses(config);
 
+    config.log.info("\n=== UPDATE ENROLLMENTS ===");
+    await updateEnrollmentHistories(config);
+
+    config.log.info("\n=== UPDATE GRADES ===");
+    await updateGradeDistributions(config);
+
     config.log.info("\n=== DATA PULLING COMPLETED ===");
   } catch (error) {
     config.log.error(error);
diff --git a/docs/src/getting-started/local-development.md b/docs/src/getting-started/local-development.md
index 2331b22ed..d5d38b664 100644
--- a/docs/src/getting-started/local-development.md
+++ b/docs/src/getting-started/local-development.md
@@ -1 +1,19 @@
 # Local Development
+
+## Seeding Local Database
+
+A seeded database is required for some pages on the frontend.
+
+```sh
+# ./berkeleytime
+
+# Ensure the MongoDB instance is already running.
+docker compose up -d
+
+# Download the data
+curl -O https://storage.googleapis.com/berkeleytime/public/stage_backup.gz
+
+# Copy the data and restore
+docker cp ./stage_backup.gz berkeleytime-mongodb-1:/tmp/stage_backup.gz
+docker exec berkeleytime-mongodb-1 mongorestore --drop --gzip --archive=/tmp/stage_backup.gz
+```
diff --git a/infra/app/templates/cleanup.yaml b/infra/app/templates/cleanup.yaml
index c7ec54d6b..bda957f3a 100644
--- a/infra/app/templates/cleanup.yaml
+++ b/infra/app/templates/cleanup.yaml
@@ -9,7 +9,7 @@ metadata:
 spec:
   template:
     spec:
-      serviceAccountName: bt-app-cleanup
+      serviceAccountName: bt-k8s-role
       containers:
         - name: cleanup
           image: alpine/helm
diff --git a/infra/app/templates/datapuller.yaml b/infra/app/templates/datapuller.yaml
index c8d42123d..09b3f2207 100644
--- a/infra/app/templates/datapuller.yaml
+++ b/infra/app/templates/datapuller.yaml
@@ -19,6 +19,8 @@ spec:
     spec:
       template:
        spec:
+          labels:
+            {{- include "bt-app.datapullerLabels" $root | nindent 12 }}
          containers:
            - name: datapuller-{{ $jobName }}
              image: {{ printf "%s/%s:%s" $jobConfig.image.registry $jobConfig.image.repository ( toString $jobConfig.image.tag ) }}
diff --git a/infra/app/values.yaml b/infra/app/values.yaml
index 052b4a331..ea401bde6 100644
--- a/infra/app/values.yaml
+++ b/infra/app/values.yaml
@@ -4,7 +4,7 @@ ttl: 24 # in hours
 
 host: berkeleytime.com
 port: 80
-mongoUri: mongodb://bt-prod-mongo-mongodb.bt.svc.cluster.local:27017/bt # TODO(core): change to replicaset
+mongoUri: mongodb://bt-prod-mongo-mongodb-0.bt-prod-mongo-mongodb-headless.bt.svc.cluster.local:27017/bt
 redisUri: redis://bt-prod-redis-master.bt.svc.cluster.local:6379
 nodeEnv: production
 
diff --git a/infra/base/templates/issuer.yaml b/infra/base/templates/issuer.yaml
index ba7dfb685..10890c7a5 100644
--- a/infra/base/templates/issuer.yaml
+++ b/infra/base/templates/issuer.yaml
@@ -1,4 +1,4 @@
-{{ /* https://cert-manager.io/docs/configuration/acme/dns01/cloudflare/#api-tokens */ }}
+{{- /* https://cert-manager.io/docs/configuration/acme/dns01/cloudflare/#api-tokens */ -}}
 apiVersion: cert-manager.io/v1
 kind: Issuer
 metadata:
diff --git a/infra/base/templates/cleanup-role.yaml b/infra/base/templates/k8s-role.yaml
similarity index 75%
rename from infra/base/templates/cleanup-role.yaml
rename to infra/base/templates/k8s-role.yaml
index 042a2cd30..01647981d 100644
--- a/infra/base/templates/cleanup-role.yaml
+++ b/infra/base/templates/k8s-role.yaml
@@ -1,14 +1,14 @@
 apiVersion: v1
 kind: ServiceAccount
 metadata:
-  name: bt-app-cleanup
+  name: bt-k8s-role
 
 ---
 
 apiVersion: rbac.authorization.k8s.io/v1
 kind: Role
 metadata:
-  name: bt-app-cleanup
+  name: bt-k8s-role
 rules:
   - apiGroups: ["*"]
     resources: ["*"]
@@ -19,12 +19,12 @@
 apiVersion: rbac.authorization.k8s.io/v1
 kind: RoleBinding
 metadata:
-  name: bt-app-cleanup
+  name: bt-k8s-role
 subjects:
   - kind: ServiceAccount
-    name: bt-app-cleanup
+    name: bt-k8s-role
     apiGroup: ""
 roleRef:
   kind: Role
-  name: bt-app-cleanup
+  name: bt-k8s-role
   apiGroup: "rbac.authorization.k8s.io"
diff --git a/infra/base/templates/reset-dev-mongo.yaml b/infra/base/templates/reset-dev-mongo.yaml
new file mode 100644
index 000000000..bd193241c
--- /dev/null
+++ b/infra/base/templates/reset-dev-mongo.yaml
@@ -0,0 +1,53 @@
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: {{ .Release.Name }}-reset-dev-mongo
+  namespace: bt
+spec:
+  schedule: "0 5 * * *" # Daily at 5 AM, after datapuller
+  timeZone: America/Los_Angeles
+  concurrencyPolicy: Forbid
+  suspend: false
+  jobTemplate:
+    spec:
+      ttlSecondsAfterFinished: 180
+      template:
+        spec:
+          serviceAccountName: bt-k8s-role
+          containers:
+            - name: reset-dev-mongo
+              image: alpine/k8s:1.29.11
+              command:
+                - sh
+                - -c
+                - |
+                  set -e # Exit immediately if a command fails
+
+                  # Find stage and dev MongoDB pods
+                  stage_pod=$(kubectl get pods -o custom-columns=NAME:.metadata.name --no-headers -n bt | grep 'bt-stage-mongo')
+                  dev_pod=$(kubectl get pods -o custom-columns=NAME:.metadata.name --no-headers -n bt | grep 'bt-dev-mongo')
+
+                  # Dump staging MongoDB state
+                  echo "Dumping staging MongoDB state..."
+                  kubectl exec --namespace=bt \
+                    "$stage_pod" -- mongodump --archive=/tmp/stage_backup.gz --gzip
+                  kubectl cp --namespace=bt \
+                    "$stage_pod:/tmp/stage_backup.gz" /tmp/stage_backup.gz
+                  kubectl exec --namespace=bt \
+                    "$stage_pod" -- rm /tmp/stage_backup.gz
+
+                  # Restore dump into dev MongoDB
+                  echo "Restoring dump into dev MongoDB..."
+                  kubectl cp --namespace=bt \
+                    /tmp/stage_backup.gz "$dev_pod:/tmp/stage_backup.gz"
+                  kubectl exec --namespace=bt \
+                    "$dev_pod" -- mongosh bt --eval "db.dropDatabase()"
+                  kubectl exec --namespace=bt \
+                    "$dev_pod" -- mongorestore --archive=/tmp/stage_backup.gz --gzip --drop
+                  kubectl exec --namespace=bt \
+                    "$dev_pod" -- rm /tmp/stage_backup.gz
+
+                  # Cleanup local files
+                  rm /tmp/stage_backup.gz
+                  echo "MongoDB reset completed successfully!"
+          restartPolicy: Never
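
A minimal sketch for exercising the new runbook outside the nightly schedule, assuming an authenticated `gh` CLI, `kubectl` access to the `bt` namespace, and that the base chart is released as `bt-base` (so the CronJob is named `bt-base-reset-dev-mongo`); the `-manual-test` job suffix below is just an illustrative name, not part of the patch:

```sh
# Trigger the workflow_dispatch runbook from a local checkout (assumes gh is authenticated for this repo)
gh workflow run runbook-reset-dev-mongo.yaml

# Or create a one-off Job from the CronJob directly, mirroring what the workflow runs over SSH
kubectl create job --from=cronjob/bt-base-reset-dev-mongo bt-base-reset-dev-mongo-manual-test -n bt
job_pod=$(kubectl get pods -o custom-columns=NAME:.metadata.name --no-headers -n bt | grep 'bt-base-reset-dev-mongo-manual-test')
kubectl wait --for=condition=ready "pod/$job_pod" -n bt --timeout=60s
kubectl logs -f "$job_pod" -n bt
```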