diff --git a/.github/workflows/_build-image-to-registry.yml b/.github/workflows/_build-image-to-registry.yml
new file mode 100644
index 0000000..905f3a9
--- /dev/null
+++ b/.github/workflows/_build-image-to-registry.yml
@@ -0,0 +1,43 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Call - Build Images to Registry
+permissions: read-all
+on:
+  workflow_call:
+    inputs:
+      node:
+        default: "xeon"
+        required: true
+        type: string
+      tag:
+        default: "latest"
+        required: false
+        type: string
+
+jobs:
+  call-build-image-to-registry:
+    runs-on: "docker-build-${{ inputs.node }}"
+    steps:
+      - name: Clean Up Working Directory
+        run: sudo rm -rf ${{github.workspace}}/*
+
+      - name: Get Checkout Ref
+        run: |
+          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
+            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
+          else
+            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
+          fi
+
+      - name: Checkout Repo
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+          fetch-depth: 0
+
+      - name: Build Image and Push Image
+        run: |
+          sudo apt install ansible -y
+          ansible-playbook build-image-to-registry.yml -e "container_registry=${OPEA_IMAGE_REPO}opea" -e "container_tag=${{ inputs.tag }}"
+        working-directory: ${{ github.workspace }}/setup-scripts/build-image-to-registry/
\ No newline at end of file
diff --git a/.github/workflows/_e2e-test.yml b/.github/workflows/_e2e-test.yml
new file mode 100644
index 0000000..8009f58
--- /dev/null
+++ b/.github/workflows/_e2e-test.yml
@@ -0,0 +1,97 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Call - E2E Test
+permissions: read-all
+on:
+  workflow_call:
+    inputs:
+      node:
+        default: "xeon"
+        required: true
+        type: string
+      tag:
+        default: "latest"
+        required: false
+        type: string
+
+jobs:
+  call-e2e-test:
+    runs-on: "k8s-${{ inputs.node }}"
+    steps:
+      - name: Clean Up Working Directory
+        run: sudo rm -rf ${{github.workspace}}/*
+
+      - name: Get Checkout Ref
+        run: |
+          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
+            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
+          else
+            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
+          fi
+
+      - name: Checkout Repo
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+          fetch-depth: 0
+
+      - name: Update Manifest
+        run: |
+          find . -type f -name 'studio-manifest.yaml' -exec sed -i 's/value: opea/value: ${REGISTRY}/g' {} \;
+        working-directory: ${{ github.workspace }}/setup-scripts/setup-genai-studio/manifests/
+
+      - name: Deploy GenAI Studio
+        run: |
+          if kubectl get namespace studio; then
+            kubectl delete -f manifests/studio-manifest.yaml || true
+            kubectl wait --for=delete pod --all --namespace=studio --timeout=300s
+          fi
+          if kubectl get namespace monitoring; then
+            kubectl delete -f manifests/monitoring-manifest.yaml || true
+            kubectl wait --for=delete pod --all --namespace=monitoring --timeout=300s
+          fi
+          sleep 5
+          sudo apt install ansible -y
+          ansible-playbook genai-studio.yml -e "container_registry=${OPEA_IMAGE_REPO}opea" -e "container_tag=${{ inputs.tag }}"
+          sleep 5
+          kubectl wait --for=condition=ready pod --all --namespace=studio --timeout=300s --field-selector=status.phase!=Succeeded
+          kubectl wait --for=condition=ready pod --all --namespace=monitoring --timeout=300s --field-selector=status.phase!=Succeeded
+        working-directory: ${{ github.workspace }}/setup-scripts/setup-genai-studio/
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: '20.18.0'
+
+      - name: Install Dependencies
+        run: |
+          npm install
+          npx playwright install
+          npx playwright install-deps
+        working-directory: ${{ github.workspace }}/tests/playwright
+
+      - name: Update Playwright Config
+        run: |
+          NODE_IP=$(kubectl get nodes -o jsonpath='{.items[0].status.addresses[?(@.type=="InternalIP")].address}')
+          sed -i "s|baseURL:.*|baseURL: \"http://$NODE_IP:30007\",|" playwright.config.js
+        working-directory: ${{ github.workspace }}/tests/playwright
+
+      - name: Run Playwright Tests
+        run: npx playwright test
+        working-directory: ${{ github.workspace }}/tests/playwright
+
+      - name: Upload Test Results
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: playwright-test-results
+          path: ${{ github.workspace }}/tests/playwright/playwright-report
+
+      - name: Cleanup sandbox namespaces
+        if: always()
+        run: |
+          for ns in $(kubectl get namespaces -o jsonpath='{.items[*].metadata.name}' | tr ' ' '\n' | grep '^sandbox-'); do
+            kubectl delete namespace $ns || true
+          done
+
diff --git a/.github/workflows/manual-docker-build.yml b/.github/workflows/manual-docker-build.yml
new file mode 100644
index 0000000..3d0d2c4
--- /dev/null
+++ b/.github/workflows/manual-docker-build.yml
@@ -0,0 +1,42 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Manual - Docker Build and Test
+on:
+  workflow_dispatch:
+    inputs:
+      nodes:
+        default: "xeon"
+        description: "Hardware to run test"
+        required: true
+        type: string
+      tag:
+        default: "latest"
+        description: "Tag to apply to images"
+        required: true
+        type: string
+      e2e_test:
+        default: true
+        description: "Run E2E test after build"
+        required: false
+        type: boolean
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}-on-manual-dispatch
+  cancel-in-progress: true
+
+jobs:
+  manual-build-images:
+    uses: ./.github/workflows/_build-image-to-registry.yml
+    with:
+      node: ${{ inputs.nodes }}
+      tag: ${{ inputs.tag }}
+    secrets: inherit
+  manual-run-e2e-test:
+    if: ${{ inputs.e2e_test }}
+    uses: ./.github/workflows/_e2e-test.yml
+    needs: manual-build-images
+    with:
+      node: ${{ inputs.nodes }}
+      tag: ${{ inputs.tag }}
+    secrets: inherit
\ No newline at end of file
diff --git a/.github/workflows/manual-docker-publish.yml b/.github/workflows/manual-docker-publish.yml
new file mode 100644
index 0000000..d0b879c
--- /dev/null
+++ b/.github/workflows/manual-docker-publish.yml
@@ -0,0 +1,68 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Manual - Publish Docker Images
+on:
+  workflow_dispatch:
+    inputs:
+      node:
+        default: "xeon"
+        description: "Hardware to run test"
+        required: true
+        type: string
+      studio_frontend:
+        description: "Publish studio-frontend image?"
+        required: true
+        type: boolean
+        default: true
+      studio_backend:
+        description: "Publish studio-backend image?"
+        required: true
+        type: boolean
+        default: true
+      app_frontend:
+        description: "Publish app-frontend image?"
+        required: true
+        type: boolean
+        default: true
+      app_backend:
+        description: "Publish app-backend image?"
+        required: true
+        type: boolean
+        default: true
+      tag:
+        default: "rc"
+        description: "Tag to publish, like [1.0rc]"
+        required: true
+        type: string
+      publish_tags:
+        default: "latest,1.x"
+        description: "Comma-separated tag list to apply to published images, like [latest,1.0]"
+        required: false
+        type: string
+
+permissions: read-all
+jobs:
+  publish:
+    strategy:
+      matrix:
+        image: ${{ fromJson('[ "studio-frontend", "studio-backend", "app-frontend", "app-backend" ]') }}
+      fail-fast: false
+    runs-on: "docker-build-${{ inputs.node }}"
+    steps:
+      - uses: docker/login-action@v3.2.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USER }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Check if image should be published
+        if: ${{ github.event.inputs[ matrix.image ] == 'true' }}
+        run: echo "Publishing ${{ matrix.image }} image"
+
+      - name: Image Publish
+        if: ${{ github.event.inputs[ matrix.image ] == 'true' }}
+        uses: opea-project/validation/actions/image-publish@main
+        with:
+          local_image_ref: ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }}
+          image_name: opea/${{ matrix.image }}
+          publish_tags: ${{ inputs.publish_tags }}
\ No newline at end of file
diff --git a/.github/workflows/manual-docker-scan.yml b/.github/workflows/manual-docker-scan.yml
new file mode 100644
index 0000000..0f7f42e
--- /dev/null
+++ b/.github/workflows/manual-docker-scan.yml
@@ -0,0 +1,103 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Manual - Docker Scan (SBOM and CVE)
+on:
+  workflow_dispatch:
+    inputs:
+      node:
+        default: "xeon"
+        description: "Hardware to run scan"
+        required: true
+        type: string
+      tag:
+        default: "latest"
+        description: "Tag for images to scan"
+        required: true
+        type: string
+      sbom_scan:
+        default: true
+        description: 'Scan images for BoM'
+        required: false
+        type: boolean
+      trivy_scan:
+        default: true
+        description: 'Scan images for CVE'
+        required: false
+        type: boolean
+
+permissions: read-all
+jobs:
+  clean-workspace:
+    runs-on: "docker-build-${{ inputs.node }}"
+    steps:
+      - name: Clean up Working Directory
+        run: |
+          sudo rm -rf ${{github.workspace}}/* || true
+          # docker system prune -f
+
+  manual-docker-scan:
+    needs: clean-workspace
+    runs-on: "docker-build-${{ inputs.node }}"
+    strategy:
+      matrix:
+        image: ["studio-frontend", "studio-backend", "app-frontend", "app-backend"]
+      fail-fast: false
+      max-parallel: 2
+    steps:
+      - name: Pull Image
+        run: |
+          docker pull ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }}
+          echo "OPEA_IMAGE_REPO=${OPEA_IMAGE_REPO}" >> $GITHUB_ENV
+
+      - name: SBOM Scan Container
+        uses: anchore/sbom-action@v0.17.1
+        if: ${{ inputs.sbom_scan }}
+        with:
+          image: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }}
+          output-file: ${{ matrix.image }}-sbom-scan.txt
+          format: 'spdx-json'
+
+      - name: Security Scan Container
+        uses: aquasecurity/trivy-action@0.24.0
+        if: ${{ inputs.trivy_scan }}
+        with:
+          image-ref: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }}
+          output: ${{ matrix.image }}-trivy-scan.txt
+          format: 'table'
+          exit-code: '1'
+          ignore-unfixed: true
+          vuln-type: 'os,library'
+          severity: 'CRITICAL,HIGH'
+
+      - name: Cleanup
+        if: always()
+        run: docker rmi -f ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }} || true
+
+      - name: Collect Logs
+        if: always()
+        run: |
+          mkdir -p /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}
+          mv ${{ matrix.image }}-*-scan.txt /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}
+
+  upload-artifacts:
+    needs: manual-docker-scan
+    runs-on: "docker-build-${{ inputs.node }}"
+    if: always()
+    steps:
+      - name: Upload SBOM Artifacts
+        uses: actions/upload-artifact@v4.3.4
+        with:
+          name: sbom-scan-${{ inputs.tag }}-${{ github.run_number }}
+          path: /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}/*-sbom-scan.txt
+          overwrite: true
+
+      - name: Upload Trivy Artifacts
+        uses: actions/upload-artifact@v4.3.4
+        with:
+          name: trivy-scan-${{ inputs.tag }}-${{ github.run_number }}
+          path: /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}/*-trivy-scan.txt
+          overwrite: true
+
+      - name: Remove Logs
+        run: rm -rf /tmp/scan-${{ inputs.tag }}-${{ github.run_number }} && rm -rf /tmp/sbom-action-*
\ No newline at end of file
diff --git a/.github/workflows/nightly-e2e-test.yml b/.github/workflows/nightly-e2e-test.yml
new file mode 100644
index 0000000..7390169
--- /dev/null
+++ b/.github/workflows/nightly-e2e-test.yml
@@ -0,0 +1,24 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Nightly - E2E test
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "5 18 * * *" # UTC time
+
+jobs:
+  nightly-build-images:
+    uses: ./.github/workflows/_build-image-to-registry.yml
+    with:
+      node: xeon
+      tag: latest
+    secrets: inherit
+  nightly-run-e2e-test:
+    uses: ./.github/workflows/_e2e-test.yml
+    needs: nightly-build-images
+    with:
+      node: xeon
+      tag: latest
+    secrets: inherit
\ No newline at end of file
diff --git a/.github/workflows/pr-code-scan.yml b/.github/workflows/pr-code-scan.yml
index 6181895..13ab32a 100644
--- a/.github/workflows/pr-code-scan.yml
+++ b/.github/workflows/pr-code-scan.yml
@@ -1,7 +1,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-name: Code Scan
+name: PR - Code Scan (Bandit and Hadolint)
 
 on:
   pull_request:
@@ -24,7 +24,7 @@ env:
   CONTAINER_NAME: "code-scan"
 
 jobs:
-  code-scan:
+  pr-code-scan:
     runs-on: ubuntu-latest
     strategy:
      matrix:
diff --git a/.github/workflows/pr-e2e-test.yml b/.github/workflows/pr-e2e-test.yml
new file mode 100644
index 0000000..193c85a
--- /dev/null
+++ b/.github/workflows/pr-e2e-test.yml
@@ -0,0 +1,31 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: PR - E2E test
+
+on:
+  pull_request:
+    branches: ["main", "*rc"]
+    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
+    paths-ignore:
+      - "**.md"
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  pr-build-images:
+    uses: ./.github/workflows/_build-image-to-registry.yml
+    with:
+      node: xeon
+      tag: ${{ github.event_name == 'workflow_dispatch' && 'latest' || github.event.pull_request.head.sha }}
+    secrets: inherit
+  pr-run-e2e-test:
+    uses: ./.github/workflows/_e2e-test.yml
+    needs: pr-build-images
+    with:
+      node: xeon
+      tag: ${{ github.event_name == 'workflow_dispatch' && 'latest' || github.event.pull_request.head.sha }}
+    secrets: inherit
\ No newline at end of file
diff --git a/.github/workflows/weekly-trellix-scan.yml b/.github/workflows/weekly-trellix-scan.yml
index 3165054..06fd783 100644
--- a/.github/workflows/weekly-trellix-scan.yml
+++ b/.github/workflows/weekly-trellix-scan.yml
@@ -1,7 +1,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-name: Trellix Command Line Scanner
+name: Weekly - Trellix Scan
 
 on:
   workflow_dispatch:
diff --git a/README.md b/README.md
index 4b10bb5..1da3ff7 100644
--- a/README.md
+++ b/README.md
@@ -5,8 +5,8 @@
 ## Reduce Barrier of Adoption with Low-Code: End-to-End GenAI App Development for Productivity
 
-  **🌟Key Highlights**
-  - 🛠️ Build & configure GenAI applications rapidly with low-code with no setup hassle
+  **🌟Key Capabilities**
+  - 🛠️ Build & configure GenAI applications rapidly with low-code and no setup hassle
   - 🔍 Instant evaluation with sandbox on Kubernetes Namespace
   - 📊 Dynamic performance measurement & benchmarking with Grafana dashboarding via Prometheus
   - ⚙️ One-click deployment package generation for instant application setup
 
@@ -76,7 +76,7 @@ _Note: This setup has been validated on a system running Ubuntu 22.04 on an Inte
 GenAIStudio requires an on-premise Kubernetes cluster. If your server does not have Kubernetes set up, please install by following the [Kubernetes official setup guide](https://kubernetes.io/docs/setup/). Alternatively, you can try out our [setup onpremise kubernetes script](./setup-scripts/setup-onpremise-kubernetes/readme.md).
 
 ### Installation
-The installation is done using genai-studio-playbook script. The script will
+The installation is done using the genai-studio playbook script. The script will
 - Deploy a persistent volume for prometheus and a customized monitoring stack based on prometheus-community/kube-prometheus-stack (which contains both Prometheus and Grafana) in the monitoring namespace.
 - Deploy the studio-backend, studio-frontend and also a studio-nginx in the studio namespace.
 
@@ -89,12 +89,11 @@ The installation can be done with the following steps:
    ```
 
 2. **Run the commands below**
   ```sh
-  sudo apt install ansible
-  ansible-galaxy collection install kubernetes.core #install dependencies for k8s
+  sudo apt install ansible -y
   cd setup-scripts/setup-genai-studio
-  ansible-playbook genai-studio-playbook.yml
+  ansible-playbook genai-studio.yml
   ```
-  _Note: you can review the deployment configurations in [genai-studio-playbook.yml](https://github.com/opea-project/GenAIStudio/blob/main/setup-scripts/setup-genai-studio/genai-studio-playbook.yml)_
+  _Note: you can review the deployment configurations in [genai-studio.yml](./setup-scripts/setup-genai-studio/genai-studio.yml)_
 
 ## Getting Started with GenAIStudio
 
diff --git a/assets/screenshots/key_components.png b/assets/screenshots/key_components.png
index cf4daed..1fc1b72 100644
Binary files a/assets/screenshots/key_components.png and b/assets/screenshots/key_components.png differ
diff --git a/setup-scripts/build-image-to-registry/readme.md b/setup-scripts/build-image-to-registry/readme.md
index 698485e..9c8e3da 100644
--- a/setup-scripts/build-image-to-registry/readme.md
+++ b/setup-scripts/build-image-to-registry/readme.md
@@ -11,6 +11,6 @@ The ansible scripts used here are building, tag and push to the specified contai
 Run below commands:
 
 ```sh
-sudo apt install ansible
+sudo apt install ansible -y
 ansible-playbook build-image-to-registry.yml
 ```
\ No newline at end of file
diff --git a/setup-scripts/setup-genai-studio/manifests/monitoring-manifest.yaml b/setup-scripts/setup-genai-studio/manifests/monitoring-manifest.yaml
index b5c4e55..71ff489 100644
--- a/setup-scripts/setup-genai-studio/manifests/monitoring-manifest.yaml
+++ b/setup-scripts/setup-genai-studio/manifests/monitoring-manifest.yaml
@@ -285,6 +285,7 @@ spec:
   resources:
     requests:
       storage: "5Gi"
+  storageClassName: local-path
 ---
 # Source: kube-prometheus-stack/charts/grafana/templates/clusterrole.yaml
 kind: ClusterRole
@@ -1664,6 +1665,7 @@ spec:
     storage:
       volumeClaimTemplate:
        spec:
+          storageClassName: local-path
          accessModes: ["ReadWriteOnce"]
          resources:
            requests:
diff --git a/setup-scripts/setup-genai-studio/playbooks/deploy-monitoring.yml b/setup-scripts/setup-genai-studio/playbooks/deploy-monitoring.yml
index 39c2622..f64b1cd 100644
--- a/setup-scripts/setup-genai-studio/playbooks/deploy-monitoring.yml
+++ b/setup-scripts/setup-genai-studio/playbooks/deploy-monitoring.yml
@@ -1,13 +1,21 @@
-- name: Deploy prometheus and grafana
+- name: Deploy prometheus and grafana with local-path-storage
   hosts: localhost
   tasks:
-    - name: Create directory for Prometheus data
-      file:
-        path: /mnt/data/prometheus
-        state: directory
-        mode: '0777'
-      become: yes
-
+    - name: Check if local-path-storage namespace exists
+      shell: kubectl get namespace local-path-storage --ignore-not-found
+      register: namespace_check
+      ignore_errors: yes
+      changed_when: false
+
+    - name: Install local-path-provisioner if namespace does not exist
+      shell: kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.30/deploy/local-path-storage.yaml
+      when: namespace_check.stdout == ""
+      register: apply_output
+
+    - name: Wait for local-path-provisioner to be ready
+      shell: kubectl wait --for=condition=Ready pod -l app=local-path-provisioner -n local-path-storage --timeout=120s
+      when: namespace_check.stdout == ""
+
     - name: Create monitoring namespace
       command: kubectl create namespace monitoring
       ignore_errors: yes
diff --git a/setup-scripts/setup-genai-studio/playbooks/deploy-studio.yml b/setup-scripts/setup-genai-studio/playbooks/deploy-studio.yml
index f95a348..73ab74a 100644
--- a/setup-scripts/setup-genai-studio/playbooks/deploy-studio.yml
+++ b/setup-scripts/setup-genai-studio/playbooks/deploy-studio.yml
@@ -14,6 +14,16 @@
       command: kubectl create namespace studio
       ignore_errors: yes
 
+    - name: Check for coredns service
+      shell: kubectl get svc coredns -n kube-system --ignore-not-found
+      register: coredns_check
+      ignore_errors: yes
+      changed_when: false
+
+    - name: Update manifest if coredns is available
+      shell: sed -i 's/kube-dns/coredns/g' ../manifests/studio-manifest.yaml
+      when: coredns_check.stdout != ''
+
     - name: Apply customized studio manifest
       shell: "envsubst '${REGISTRY} ${TAG}' < ../manifests/studio-manifest.yaml | kubectl apply -f -"
       environment:
diff --git a/setup-scripts/setup-genai-studio/readme.md b/setup-scripts/setup-genai-studio/readme.md
index 181e82e..0115fea 100644
--- a/setup-scripts/setup-genai-studio/readme.md
+++ b/setup-scripts/setup-genai-studio/readme.md
@@ -2,11 +2,12 @@
 
 The genai-studio playbook script will:
 
-1. Deploy a persistent volume for prometheus and a customized monitoring stack based on prometheus-community/kube-prometheus-stack (which contains both Prometheus and Grafana) in the monitoring namespace.
+1. Deploy a customized monitoring stack based on prometheus-community/kube-prometheus-stack (which contains both Prometheus and Grafana) in the monitoring namespace with a local-path-provisioner in local-path-storage namespace, for dynamic Persistent Volumes (PVs) provisioning.
 2. Deploy the studio-backend, studio-frontend and also a studio-nginx in the studio namespace.
 
+
 ### Pre-requisite
 
 - Existing kubernetes cluster available. If not, please install by following the [Kubernetes official setup guide](https://kubernetes.io/docs/setup/). Alternatively, you can try out our [setup onpremise kubernetes script](../setup-onpremise-kubernetes/readme.md).
@@ -16,8 +17,7 @@ The genai-studio playbook script will:
 Run below commands:
 
 ```sh
-sudo apt install ansible
-ansible-galaxy collection install kubernetes.core
+sudo apt install ansible -y
 ansible-playbook genai-studio.yml
 ```
 
diff --git a/setup-scripts/setup-onpremise-kubernetes/inventory.ini b/setup-scripts/setup-onpremise-kubernetes/inventory.ini
index a58dec2..0696c7f 100644
--- a/setup-scripts/setup-onpremise-kubernetes/inventory.ini
+++ b/setup-scripts/setup-onpremise-kubernetes/inventory.ini
@@ -1,8 +1,3 @@
-[all]
-# 10.0.0.0
-# 10.0.0.1
-# 10.0.0.2
-
 [k8_master]
 # 10.0.0.0
 
diff --git a/setup-scripts/setup-onpremise-kubernetes/readme.md b/setup-scripts/setup-onpremise-kubernetes/readme.md
index ac74732..a7d52fa 100644
--- a/setup-scripts/setup-onpremise-kubernetes/readme.md
+++ b/setup-scripts/setup-onpremise-kubernetes/readme.md
@@ -9,11 +9,12 @@ The ansible scripts used here are using kubeadm method of installing an onpremis
 ### Installation steps:
 
+_Note: This script has only been validated on a freshly installed Ubuntu 22.04 machine._
+
 Run below commands:
 
 ```sh
-sudo apt install ansible
-ansible-galaxy collection install kubernetes.core
-ansible-playbook -i inventory.ini kubernetes-cluster.yml
+sudo apt install ansible -y
+ansible-playbook -i inventory.ini onpremise-kubernetes.yml
 ```
 
 To push your local docker images into the harbor container registry, run below:
diff --git a/studio-frontend/Dockerfile b/studio-frontend/Dockerfile
index 986ede4..1606cbd 100644
--- a/studio-frontend/Dockerfile
+++ b/studio-frontend/Dockerfile
@@ -9,6 +9,12 @@ RUN apk add --no-cache gcompat=1.1.0-r4 python3=3.12.7-r0 make=4.4.1-r2 g++=13.2
     # Install PNPM globally
     npm install -g pnpm@9.12.3
 
+# Install git
+RUN apk add --no-cache git=2.45.2-r0
+
+# Debug step to verify git installation
+RUN git --version
+
 ENV PUPPETEER_SKIP_DOWNLOAD=true
 ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
 ENV NODE_OPTIONS=--max-old-space-size=8192
@@ -21,8 +27,10 @@ COPY . .
 
 # Install dependencies and build the app
 RUN pnpm config set store-dir .pnpm-store && \
     pnpm install && \
+    pnpm build && \
+    pnpm remove esbuild && \
     rm -rf .pnpm-store && \
-    pnpm build
+    pnpm prune --prod
 
 EXPOSE 3000
diff --git a/studio-frontend/package.json b/studio-frontend/package.json
index 1081d31..1095a92 100644
--- a/studio-frontend/package.json
+++ b/studio-frontend/package.json
@@ -32,6 +32,7 @@
     "@babel/preset-typescript": "7.18.6",
     "@types/express": "^4.17.13",
     "@typescript-eslint/typescript-estree": "^7.13.1",
+    "esbuild": "^0.21.5",
     "eslint": "^8.24.0",
     "eslint-config-prettier": "^8.3.0",
     "eslint-config-react-app": "^7.0.1",
@@ -48,7 +49,7 @@
     "pretty-quick": "^3.1.3",
     "rimraf": "^3.0.2",
     "run-script-os": "^1.1.6",
-    "turbo": "1.10.16",
+    "turbo": "latest",
     "typescript": "^5.4.5"
   },
   "pnpm": {
@@ -70,13 +71,12 @@
       "openai": "4.51.0",
       "@langchain/core": "0.2.18",
       "axios": "1.7.4",
-      "lunary": "0.7.13",
       "nth-check": "2.0.1",
       "pdfjs-dist": "4.2.67",
       "prismjs": "1.27.0",
       "semver": "7.5.2",
      "ws": "8.17.1",
-      "@esbuild/linux-x64": "0.21.5"
+      "esbuild": "^0.24.0"
     },
     "eslintIgnore": [
       "**/dist",
@@ -93,6 +93,7 @@
     "semi": false,
     "endOfLine": "auto"
   },
+  "packageManager": "pnpm@9.0.0",
   "babel": {
     "presets": [
       "@babel/preset-typescript",
diff --git a/studio-frontend/turbo.json b/studio-frontend/turbo.json
index a0c9800..bea91f3 100644
--- a/studio-frontend/turbo.json
+++ b/studio-frontend/turbo.json
@@ -1,6 +1,6 @@
 {
   "$schema": "https://turbo.build/schema.json",
-  "pipeline": {
+  "tasks": {
     "build": {
       "dependsOn": ["^build"],
       "outputs": ["dist/**"]
diff --git a/tests/playwright/studio-e2e/002_test_sandbox_chatqna.spec.ts b/tests/playwright/studio-e2e/002_test_sandbox_chatqna.spec.ts
index 04186a6..585796f 100644
--- a/tests/playwright/studio-e2e/002_test_sandbox_chatqna.spec.ts
+++ b/tests/playwright/studio-e2e/002_test_sandbox_chatqna.spec.ts
@@ -3,8 +3,8 @@ import { waitForStatusText } from '../utils';
 import path from 'path';
 
 const sampleWorkflow = path.resolve(__dirname, '../../../sample-workflows/sample_workflow_chatqna.json');
-const uploadPDF1 = path.resolve(__dirname, '../../test-files/Q3 24_EarningsRelease.pdf');
-const uploadPDF2 = path.resolve(__dirname, '../../test-files/tennis_tutorial.pdf');
+const uploadPDF1 = path.resolve(__dirname, '../../test-files/tennis_tutorial.pdf');
+const uploadPDF2 = path.resolve(__dirname, '../../test-files/Q3 24_EarningsRelease.pdf');
 const question = "what is intel third-quarter 2024 revenue?";
 const keyword = "$13.3 billion";
 
@@ -53,7 +53,7 @@ test('002_test_sandbox_chatqna', async ({ page, baseURL }) => {
   await expect(page.locator('td.MuiTableCell-root div.MuiStack-root p.MuiTypography-root').first()).toHaveText('Not Running', { timeout: 60000 });
   await page.getByLabel('a dense table').locator('button').first().click();
   await waitForStatusText(page, 'td.MuiTableCell-root div.MuiStack-root p.MuiTypography-root', 'Ready', 5, 60000);
-  await page.waitForTimeout(8000);
+  await page.waitForTimeout(10000);
 
   // Open APP-UI
   const page2Promise = page.waitForEvent('popup');
@@ -73,51 +73,54 @@ test('002_test_sandbox_chatqna', async ({ page, baseURL }) => {
   }
   apiResponse.value = "";
 
-  // Document Upload 1
-  await page2.getByRole('button').nth(2).click();
   await page2.getByRole('button').nth(2).click();
+  await page2.getByRole('button').nth(2).click(); // Double click
+
+  // Document Upload 1
   fileChooserPromise = page2.waitForEvent('filechooser');
   await page2.getByRole('button', { name: 'Choose File' }).click()
   fileChooser = await fileChooserPromise;
-  await fileChooser.setFiles(uploadPDF1); // TBD: Update the path to the file
+  await fileChooser.setFiles(uploadPDF1);
   await page2.getByRole('button', { name: 'Upload', exact: true }).click();
   await page2.waitForSelector('tr:nth-of-type(1) button[data-variant="light"] .tabler-icon-check', { state: 'visible', timeout: 300000 });
   // Refresh page and verify upload with retry
-  let isVisible = false;
+  let isVisible1 = false;
   for (let i = 0; i < 2; i++) {
     await page2.reload();
     await page2.waitForTimeout(1500);
     await page2.getByRole('button').nth(2).click();
     try {
-      await expect(page2.getByRole('cell', { name: 'Q3 24_EarningsRelease' })).toBeVisible({ timeout: 60000 });
-      isVisible = true;
+      await expect(page2.getByRole('cell', { name: 'tennis_tutorial.pdf' })).toBeVisible({ timeout: 60000 });
+      isVisible1 = true;
       break;
     } catch (error) {
      console.log(`Attempt ${i + 1} failed: ${error}`);
    }
   }
-
+  await page2.waitForTimeout(1000);
+
   // Document Upload 2
   fileChooserPromise = page2.waitForEvent('filechooser');
   await page2.getByRole('button', { name: 'Choose File' }).click()
   fileChooser = await fileChooserPromise;
-  await fileChooser.setFiles(uploadPDF2); // TBD Update the file path
+  await fileChooser.setFiles(uploadPDF2);
   await page2.getByRole('button', { name: 'Upload', exact: true }).click();
   await page2.waitForSelector('tr:nth-of-type(2) button[data-variant="light"] .tabler-icon-check', { state: 'visible', timeout: 300000 });
   // Refresh page and verify upload with retry
-  isVisible = false;
+  let isVisible2 = false;
   for (let i = 0; i < 2; i++) {
     await page2.reload();
     await page2.waitForTimeout(1500);
     await page2.getByRole('button').nth(2).click();
     try {
-      await expect(page2.getByRole('cell', { name: 'tennis_tutorial.pdf' })).toBeVisible({ timeout: 60000 });
-      isVisible = true;
-      break;
+      await expect(page2.getByRole('cell', { name: 'Q3 24_EarningsRelease' })).toBeVisible({ timeout: 60000 });
+      isVisible2 = true;
+      break;
     } catch (error) {
-      console.log(`Attempt ${i + 1} failed: ${error}`);
+      console.log(`Attempt ${i + 1} failed: ${error}`);
     }
   }
+  await page2.waitForTimeout(1000);
 
   // Link Upload
   await page2.getByRole('button', { name: 'Use Link' }).click();
@@ -126,20 +129,21 @@ test('002_test_sandbox_chatqna', async ({ page, baseURL }) => {
   await page2.getByPlaceholder('Enter link here...').fill('https://pnatraj.medium.com/what-is-mlops-e9226662a45e');
   await page2.getByRole('button', { name: 'Upload', exact: true }).click();
   await page2.waitForSelector('tr:nth-of-type(3) button[data-variant="light"] .tabler-icon-check', { state: 'visible', timeout: 300000 });
   // Refresh page and verify upload with retry
-  isVisible = false;
+  let isVisible3 = false;
   for (let i = 0; i < 2; i++) {
     await page2.reload();
     await page2.waitForTimeout(1500);
     await page2.getByRole('button').nth(2).click();
     try {
-      await expect(page2.getByRole('cell', { name: 'https://pnatraj.medium.com/' })).toBeVisible({ timeout: 60000 });
-      isVisible = true;
+      await expect(page2.getByRole('cell', { name: 'https://pnatraj.medium.com/' })).toBeVisible({ timeout: 60000 });
+      isVisible3 = true;
       break;
     } catch (error) {
-      console.log(`Attempt ${i + 1} failed: ${error}`);
+      console.log(`Attempt ${i + 1} failed: ${error}`);
     }
   }
   await page2.getByRole('banner').getByRole('button').click();
+  await page2.waitForTimeout(10000);
 
   // Chat Attempt 2
   await page2.getByPlaceholder('Ask a question').fill(question);