diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 48c066480dc..eecb81148e6 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -4,10 +4,13 @@
   "features": {
     "ghcr.io/devcontainers/features/docker-in-docker:1": {},
     "ghcr.io/devcontainers/features/dotnet": {
-      "version": "6.0.415"
+      "version": "6.0.419"
     },
     "ghcr.io/devcontainers/features/node:1": {
       "version": "16"
+    },
+    "ghcr.io/devcontainers/features/sshd:1": {
+      "version": "latest"
     }
   },
   "customizations": {
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 927aad7f061..fe37f50036b 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -35,29 +35,6 @@ jobs:
         bash ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
       working-directory: src
-    # Check runtime/externals hash
-    - name: Compute/Compare runtime and externals Hash
-      shell: bash
-      run: |
-        echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
-        echo "Current Externals hash result: $EXTERNALS_HASH"
-        NeedUpdate=0
-        if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
-          echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
-          NeedUpdate=1
-        fi
-        if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
-          echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
-          NeedUpdate=1
-        fi
-        exit $NeedUpdate
-      env:
-        DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
-        EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
-
     # Run tests
     - name: L0
       run: |
@@ -80,6 +57,3 @@ jobs:
         name: runner-package-${{ matrix.runtime }}
         path: |
           _package
-          _package_trims/trim_externals
-          _package_trims/trim_runtime
-          _package_trims/trim_runtime_externals
diff --git a/.github/workflows/close-bugs-bot.yml b/.github/workflows/close-bugs-bot.yml
index 8c37e97eeab..4a0e129fbfb 100644
--- a/.github/workflows/close-bugs-bot.yml
+++ b/.github/workflows/close-bugs-bot.yml
@@ -15,4 +15,3 @@ jobs:
           only-labels: "actions-bug"
           days-before-stale: 0
           days-before-close: 1
-          close-issue-reason: "completed"
diff --git a/.github/workflows/close-features-bot.yml b/.github/workflows/close-features-bot.yml
index e20d5bbee4a..a710a8a842a 100644
--- a/.github/workflows/close-features-bot.yml
+++ b/.github/workflows/close-features-bot.yml
@@ -15,4 +15,3 @@ jobs:
           only-labels: "actions-feature"
           days-before-stale: 0
           days-before-close: 1
-          close-issue-reason: "completed"
diff --git a/.github/workflows/dotnet-upgrade.yml b/.github/workflows/dotnet-upgrade.yml
index e3d23d2542e..eb15e762e5f 100644
--- a/.github/workflows/dotnet-upgrade.yml
+++ b/.github/workflows/dotnet-upgrade.yml
@@ -84,221 +84,20 @@ jobs:
           git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
           git push --set-upstream origin $branch_name
-  build-hashes:
-    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
-    needs: [dotnet-update]
-    outputs:
-      # pass outputs from this job to create-pr for use
-      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
-      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
-      NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
-    strategy:
-
fail-fast: false - matrix: - runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ] - include: - - runtime: linux-x64 - os: ubuntu-latest - devScript: ./dev.sh - - - runtime: linux-arm64 - os: ubuntu-latest - devScript: ./dev.sh - - - runtime: linux-arm - os: ubuntu-latest - devScript: ./dev.sh - - - runtime: osx-x64 - os: macOS-latest - devScript: ./dev.sh - - - runtime: osx-arm64 - os: macOS-latest - devScript: ./dev.sh - - - runtime: win-x64 - os: windows-2019 - devScript: ./dev - - - runtime: win-arm64 - os: windows-latest - devScript: ./dev - - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v3 - with: - ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - - # Build runner layout - - name: Build & Layout Release - run: | - ${{ matrix.devScript }} layout Release ${{ matrix.runtime }} - working-directory: src - - # Check runtime/externals hash - - name: Compute/Compare runtime and externals Hash - id: compute-hash - continue-on-error: true - shell: bash - run: | - echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH" - echo "Current Externals hash result: $EXTERNALS_HASH" - - NeedUpdate=0 - if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then - echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH - - echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT - NeedUpdate=1 - fi - - if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then - echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH - - echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT - NeedUpdate=1 - fi - - echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT - env: - DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}} - EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}} - - name: update hash - if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }} - shell: bash - run: | - ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }} - DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }} - - if [ -n "$ExternalHash" ]; then - echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }} - fi - - if [ -n "$DotNetRuntimeHash" ]; then - echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} - fi - - name: cache updated hashes - if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }} - uses: actions/cache/save@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/${{ matrix.runtime }} - ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} - key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - - hash-update: - needs: [build-hashes] - if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }} - outputs: - # pass outputs from this job to create-pr for use - DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - - name: Restore cached hashes - linux-x64 - id: cache-restore-linux-x64 
- uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/linux-x64 - ./src/Misc/contentHash/dotnetRuntime/linux-x64 - key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Restore cached hashes - linux-arm64 - id: cache-restore-linux-arm64 - uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/linux-arm64 - ./src/Misc/contentHash/dotnetRuntime/linux-arm64 - key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Restore cached hashes - linux-arm - id: cache-restore-linux-arm - uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/linux-arm - ./src/Misc/contentHash/dotnetRuntime/linux-arm - key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Restore cached hashes - osx-x64 - id: cache-restore-osx-x64 - uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/osx-x64 - ./src/Misc/contentHash/dotnetRuntime/osx-x64 - key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Restore cached hashes - osx-arm64 - id: cache-restore-osx-arm64 - uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/osx-arm64 - ./src/Misc/contentHash/dotnetRuntime/osx-arm64 - key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Restore cached hashes - win-x64 - id: cache-restore-win-x64 - uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/win-x64 - ./src/Misc/contentHash/dotnetRuntime/win-x64 - key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Restore cached hashes - win-arm64 - id: cache-restore-win-arm64 - uses: actions/cache/restore@v3 - with: - enableCrossOsArchive: true - path: | - ./src/Misc/contentHash/externals/win-arm64 - ./src/Misc/contentHash/dotnetRuntime/win-arm64 - key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - - name: Fetch cached computed hashes - if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' || - steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' || - steps.cache-restore-linux-arm.outputs.cache-hit == 'true' || - steps.cache-restore-win-x64.outputs.cache-hit == 'true' || - steps.cache-restore-win-arm64.outputs.cache-hit == 'true' || - steps.cache-restore-osx-x64.outputs.cache-hit == 'true' || - steps.cache-restore-osx-arm64.outputs.cache-hit == 'true' - shell: bash - run: | - Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" ) - - git config --global user.name "github-actions[bot]" - git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" - git commit -a -m "Update computed hashes" - git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - create-pr: - needs: [hash-update] - outputs: - # pass outputs from this job to run-tests for use - DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - 
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }} + needs: [dotnet-update] + if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 with: - ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} + ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - name: Create Pull Request env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body " - https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version + gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body " + https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version --- diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 85d33bdf001..7621240f34a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -53,27 +53,6 @@ jobs: win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }} osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }} osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }} - linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }} - linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }} - linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }} - win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }} - win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }} - osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }} - osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }} - linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }} - linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }} - linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }} - win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }} - win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }} - osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }} - osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }} - linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }} - linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }} - linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }} - win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }} - win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }} - osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }} - osx-arm64-sha-noruntime-noexternals: ${{ 
steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }} strategy: matrix: runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ] @@ -136,76 +115,6 @@ jobs: id: sha name: Compute SHA256 working-directory: _package - - run: | - file=$(ls) - sha=$(sha256sum $file | awk '{ print $1 }') - echo "Computed sha256: $sha for $file" - echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT - echo "sha256=$sha" >> $GITHUB_OUTPUT - shell: bash - id: sha_noexternals - name: Compute SHA256 - working-directory: _package_trims/trim_externals - - run: | - file=$(ls) - sha=$(sha256sum $file | awk '{ print $1 }') - echo "Computed sha256: $sha for $file" - echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT - echo "sha256=$sha" >> $GITHUB_OUTPUT - shell: bash - id: sha_noruntime - name: Compute SHA256 - working-directory: _package_trims/trim_runtime - - run: | - file=$(ls) - sha=$(sha256sum $file | awk '{ print $1 }') - echo "Computed sha256: $sha for $file" - echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT - echo "sha256=$sha" >> $GITHUB_OUTPUT - shell: bash - id: sha_noruntime_noexternals - name: Compute SHA256 - working-directory: _package_trims/trim_runtime_externals - - - name: Create trimmedpackages.json for ${{ matrix.runtime }} - if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64' - uses: actions/github-script@0.3.0 - with: - github-token: ${{secrets.GITHUB_TOKEN}} - script: | - const core = require('@actions/core') - const fs = require('fs'); - const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '') - var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(//g, runnerVersion).replace(//g, '${{ matrix.runtime }}') - trimmedPackages = trimmedPackages.replace(//g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}') - trimmedPackages = trimmedPackages.replace(//g, '${{hashFiles('**/_layout_trims/externals/**/*')}}') - - trimmedPackages = trimmedPackages.replace(//g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}') - trimmedPackages = trimmedPackages.replace(//g, '${{steps.sha_noruntime.outputs.sha256}}') - trimmedPackages = trimmedPackages.replace(//g, '${{steps.sha_noexternals.outputs.sha256}}') - - console.log(trimmedPackages) - fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages) - - - name: Create trimmedpackages.json for ${{ matrix.runtime }} - if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64' - uses: actions/github-script@0.3.0 - with: - github-token: ${{secrets.GITHUB_TOKEN}} - script: | - const core = require('@actions/core') - const fs = require('fs'); - const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '') - var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(//g, runnerVersion).replace(//g, '${{ matrix.runtime }}') - trimmedPackages = trimmedPackages.replace(//g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}') - trimmedPackages = trimmedPackages.replace(//g, '${{hashFiles('**/_layout_trims/externals/**/*')}}') - - trimmedPackages = trimmedPackages.replace(//g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}') - trimmedPackages = trimmedPackages.replace(//g, '${{steps.sha_noruntime.outputs.sha256}}') - trimmedPackages = trimmedPackages.replace(//g, '${{steps.sha_noexternals.outputs.sha256}}') - - console.log(trimmedPackages) - fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages) # Upload runner package tar.gz/zip 
as artifact. # Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name @@ -216,10 +125,6 @@ jobs: name: runner-packages path: | _package - _package_trims/trim_externals - _package_trims/trim_runtime - _package_trims/trim_runtime_externals - ${{ matrix.runtime }}-trimmedpackages.json release: needs: build @@ -253,33 +158,11 @@ jobs: releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-x64-sha}}') releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm-sha}}') releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm64-sha}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.win-x64-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.win-arm64-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.osx-x64-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-x64-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.win-x64-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.win-arm64-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.osx-x64-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-x64-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}') - releaseNote = releaseNote.replace(//g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}') console.log(releaseNote) core.setOutput('version', runnerVersion); core.setOutput('note', releaseNote); - name: Validate Packages HASH - working-directory: _package run: | ls -l echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c @@ -309,7 +192,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip + asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip asset_content_type: application/octet-stream @@ -319,7 +202,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ 
steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip + asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip asset_content_type: application/octet-stream @@ -329,7 +212,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz + asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_content_type: application/octet-stream @@ -339,7 +222,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz + asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_content_type: application/octet-stream @@ -349,7 +232,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz + asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_content_type: application/octet-stream @@ -359,7 +242,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz + asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz asset_content_type: application/octet-stream @@ -369,298 +252,10 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz + asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz asset_content_type: application/octet-stream - # Upload release assets (trim externals) - - name: Upload Release Asset (win-x64-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip - asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip - asset_content_type: application/octet-stream - - # Upload release assets (trim externals) - - name: Upload Release Asset (win-arm64-noexternals) - uses: actions/upload-release-asset@v1.0.1 
- env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip - asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-x64-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-x64-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-arm64-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm64-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz - asset_content_type: application/octet-stream - - # Upload release assets (trim runtime) - - name: Upload Release Asset (win-x64-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip - asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip - asset_content_type: application/octet-stream - - # 
Upload release assets (trim runtime) - - name: Upload Release Asset (win-arm64-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip - asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-x64-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-x64-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-arm64-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm64-noruntime) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz - asset_content_type: application/octet-stream - - # Upload release assets (trim runtime and externals) - - name: Upload Release Asset (win-x64-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip - 
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip - asset_content_type: application/octet-stream - - # Upload release assets (trim runtime and externals) - - name: Upload Release Asset (win-arm64-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip - asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-x64-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-x64-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-arm64-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm64-noruntime-noexternals) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz - asset_content_type: application/octet-stream - - # Upload 
release assets (trimmedpackages.json) - - name: Upload Release Asset (win-x64-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json - asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - - # Upload release assets (trimmedpackages.json) - - name: Upload Release Asset (win-arm64-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json - asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-x64-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json - asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-x64-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json - asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - - - name: Upload Release Asset (osx-arm64-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json - asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json - asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - - - name: Upload Release Asset (linux-arm64-trimmedpackages.json) - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.createRelease.outputs.upload_url }} - asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json - asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json - asset_content_type: application/octet-stream - publish-image: needs: release runs-on: ubuntu-latest diff --git a/docs/checks/actions.md b/docs/checks/actions.md index dd63fd88614..bdf3abfc1c8 100644 --- a/docs/checks/actions.md +++ b/docs/checks/actions.md @@ -7,8 +7,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente - For GitHub.com - The runner needs to 
access `https://api.github.com` for downloading actions.
+  - The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
   - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
   - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
+  - The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
 ---
 **NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).
@@ -16,12 +18,15 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 ```
 curl -v https://api.github.com/zen
+curl -v https://codeload.github.com/_ping
 curl -v https://vstoken.actions.githubusercontent.com/_apis/health
 curl -v https://pipelines.actions.githubusercontent.com/_apis/health
+curl -v https://results-receiver.actions.githubusercontent.com/health
 ```
 - For GitHub Enterprise Server
   - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
+  - The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
   - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
   - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.
@@ -29,6 +34,7 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 ```
 curl -v https://[hostname]/api/v3/zen
+curl -v https://codeload.[hostname]/_ping
 curl -v https://[hostname]/_services/vstoken/_apis/health
 curl -v https://[hostname]/_services/pipelines/_apis/health
 ```
@@ -44,6 +50,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - Ping api.github.com or myGHES.com using dotnet
 - Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
 ---
+- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
+- Ping codeload.github.com or codeload.myGHES.com using dotnet
+- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
+---
 - DNS lookup for vstoken.actions.githubusercontent.com using dotnet
 - Ping vstoken.actions.githubusercontent.com using dotnet
 - Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
@@ -52,6 +62,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - Ping pipelines.actions.githubusercontent.com using dotnet
 - Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
 - Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
+---
+- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
+- Ping results-receiver.actions.githubusercontent.com using dotnet
+- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`
 ## How to fix the issue?
diff --git a/docs/checks/network.md b/docs/checks/network.md
index aaf92480f44..758618a5e90 100644
--- a/docs/checks/network.md
+++ b/docs/checks/network.md
@@ -42,6 +42,7 @@ If you are having trouble connecting, try these steps:
     - https://api.github.com/
     - https://vstoken.actions.githubusercontent.com/_apis/health
     - https://pipelines.actions.githubusercontent.com/_apis/health
+    - https://results-receiver.actions.githubusercontent.com/health
   - For GHES/GHAE
     - https://myGHES.com/_services/vstoken/_apis/health
     - https://myGHES.com/_services/pipelines/_apis/health
diff --git a/docs/start/envlinux.md b/docs/start/envlinux.md
index 5fddae96f17..11eff187693 100644
--- a/docs/start/envlinux.md
+++ b/docs/start/envlinux.md
@@ -5,9 +5,9 @@
 ## Supported Distributions and Versions
 x64
-  - Red Hat Enterprise Linux 7
-  - CentOS 7
-  - Oracle Linux 7
+  - Red Hat Enterprise Linux 7+
+  - CentOS 7+
+  - Oracle Linux 7+
   - Fedora 29+
   - Debian 9+
   - Ubuntu 16.04+
diff --git a/images/Dockerfile b/images/Dockerfile
index 12c4624b891..f95a5e47e93 100644
--- a/images/Dockerfile
+++ b/images/Dockerfile
@@ -4,9 +4,9 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
 ARG TARGETOS
 ARG TARGETARCH
 ARG RUNNER_VERSION
-ARG RUNNER_CONTAINER_HOOKS_VERSION=0.4.0
-ARG DOCKER_VERSION=24.0.6
-ARG BUILDX_VERSION=0.11.2
+ARG RUNNER_CONTAINER_HOOKS_VERSION=0.5.1
+ARG DOCKER_VERSION=25.0.2
+ARG BUILDX_VERSION=0.12.1
 RUN apt update -y && apt install curl unzip -y
@@ -37,6 +37,7 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy
 ENV DEBIAN_FRONTEND=noninteractive
 ENV RUNNER_MANUALLY_TRAP_SIG=1
 ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
+ENV ImageOS=ubuntu22
 RUN apt-get update -y \
     && apt-get install -y --no-install-recommends \
diff --git a/releaseNote.md b/releaseNote.md
index 0c36cf40404..e9f86ca4f37 100644
--- a/releaseNote.md
+++ b/releaseNote.md
@@ -1,23 +1,23 @@
 ## What's Changed
-* Trim whitespace in `./Misc/contentHash/dotnetRuntime/*` by @TingluoHuang in https://github.com/actions/runner/pull/2915
-* Send os and arch during long poll by @luketomlinson in https://github.com/actions/runner/pull/2913
-* Revert "Update default version to node20 (#2844)" by @takost in https://github.com/actions/runner/pull/2918
-* Fix telemetry publish from JobServerQueue. by @TingluoHuang in https://github.com/actions/runner/pull/2919
-* Use block blob instead of append blob by @yacaovsnc in https://github.com/actions/runner/pull/2924
-* Provide detail info on untar failures. by @TingluoHuang in https://github.com/actions/runner/pull/2939
-* Bump node.js to 20.8.1 by @TingluoHuang in https://github.com/actions/runner/pull/2945
-* Update dotnet sdk to latest version @6.0.415 by @github-actions in https://github.com/actions/runner/pull/2929
-* Fix typo in log strings by @rajbos in https://github.com/actions/runner/pull/2695
-* feat: add support of arm64 arch runners in service creation script by @tuxity in https://github.com/actions/runner/pull/2606
-* Add `buildx` to images by @ajschmidt8 in https://github.com/actions/runner/pull/2901
+* Prepare v2.313.0 Release by @luketomlinson in https://github.com/actions/runner/pull/3137
+* Pass RunnerOS during job acquire. by @TingluoHuang in https://github.com/actions/runner/pull/3140
+* Process `snapshot` tokens by @davidomid in https://github.com/actions/runner/pull/3135
+* Update dotnet sdk to latest version @6.0.419 by @github-actions in https://github.com/actions/runner/pull/3158
+* handle broker run service exception handling by @yaananth in https://github.com/actions/runner/pull/3163
+* Add a retry logic to docker login operation by @enescakir in https://github.com/actions/runner/pull/3089
+* Broker fixes for token refreshes and AccessDeniedException by @luketomlinson in https://github.com/actions/runner/pull/3161
+* Remove USE_BROKER_FLOW by @luketomlinson in https://github.com/actions/runner/pull/3162
+* Refresh Token for BrokerServer by @luketomlinson in https://github.com/actions/runner/pull/3167
+* Better step timeout message. by @TingluoHuang in https://github.com/actions/runner/pull/3166
 ## New Contributors
-* @tuxity made their first contribution in https://github.com/actions/runner/pull/2606
+* @davidomid made their first contribution in https://github.com/actions/runner/pull/3135
+* @enescakir made their first contribution in https://github.com/actions/runner/pull/3089
-**Full Changelog**: https://github.com/actions/runner/compare/v2.310.2...v2.311.0
+**Full Changelog**: https://github.com/actions/runner/compare/v2.313.0...v2.314.0
-_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
-To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
+_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
+To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository. See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
 ## Windows x64
@@ -119,27 +119,3 @@ The SHA-256 checksums for the packages included in this build are shown below:
 - actions-runner-linux-x64-.tar.gz
 - actions-runner-linux-arm64-.tar.gz
 - actions-runner-linux-arm-.tar.gz
-
-- actions-runner-win-x64--noexternals.zip
-- actions-runner-win-arm64--noexternals.zip
-- actions-runner-osx-x64--noexternals.tar.gz
-- actions-runner-osx-arm64--noexternals.tar.gz
-- actions-runner-linux-x64--noexternals.tar.gz
-- actions-runner-linux-arm64--noexternals.tar.gz
-- actions-runner-linux-arm--noexternals.tar.gz
-
-- actions-runner-win-x64--noruntime.zip
-- actions-runner-win-arm64--noruntime.zip
-- actions-runner-osx-x64--noruntime.tar.gz
-- actions-runner-osx-arm64--noruntime.tar.gz
-- actions-runner-linux-x64--noruntime.tar.gz
-- actions-runner-linux-arm64--noruntime.tar.gz
-- actions-runner-linux-arm--noruntime.tar.gz
-
-- actions-runner-win-x64--noruntime-noexternals.zip
-- actions-runner-win-arm64--noruntime-noexternals.zip
-- actions-runner-osx-x64--noruntime-noexternals.tar.gz
-- actions-runner-osx-arm64--noruntime-noexternals.tar.gz
-- actions-runner-linux-x64--noruntime-noexternals.tar.gz
-- actions-runner-linux-arm64--noruntime-noexternals.tar.gz
-- actions-runner-linux-arm--noruntime-noexternals.tar.gz
diff --git a/src/Misc/contentHash/dotnetRuntime/linux-arm b/src/Misc/contentHash/dotnetRuntime/linux-arm
index c750b23ed50..9f55d62ef2a 100644
--- a/src/Misc/contentHash/dotnetRuntime/linux-arm
+++ b/src/Misc/contentHash/dotnetRuntime/linux-arm
@@ -1 +1 @@
-531b31914e525ecb12cc5526415bc70a112ebc818f877347af1a231011f539c5
\ No newline at end of file
+54d95a44d118dba852395991224a6b9c1abe916858c87138656f80c619e85331
\ No newline at end of file
diff --git a/src/Misc/contentHash/dotnetRuntime/linux-arm64 b/src/Misc/contentHash/dotnetRuntime/linux-arm64
index 3380d9dcbfa..c03c98ade6c 100644
--- a/src/Misc/contentHash/dotnetRuntime/linux-arm64
+++ b/src/Misc/contentHash/dotnetRuntime/linux-arm64
@@ -1 +1 @@
-722dd5fa5ecc207fcccf67f6e502d689f2119d8117beff2041618fba17dc66a4
\ No newline at end of file
+68015af17f06a824fa478e62ae7393766ce627fd5599ab916432a14656a19a52
\ No newline at end of file
diff --git a/src/Misc/contentHash/dotnetRuntime/linux-x64 b/src/Misc/contentHash/dotnetRuntime/linux-x64
index b2f1fc1a743..95a7155f74d 100644
--- a/src/Misc/contentHash/dotnetRuntime/linux-x64
+++ b/src/Misc/contentHash/dotnetRuntime/linux-x64
@@ -1 +1 @@
-8ca75c76e15ab9dc7fe49a66c5c74e171e7fabd5d26546fda8931bd11bff30f9
\ No newline at end of file
+a2628119ca419cb54e279103ffae7986cdbd0814d57c73ff0dc74c38be08b9ae
\ No newline at end of file
diff --git a/src/Misc/contentHash/dotnetRuntime/osx-arm64 b/src/Misc/contentHash/dotnetRuntime/osx-arm64
index 783fa8b5599..d99ff5942f0 100644
--- a/src/Misc/contentHash/dotnetRuntime/osx-arm64
+++ b/src/Misc/contentHash/dotnetRuntime/osx-arm64
@@ -1 +1 @@
-70496eb1c99b39b3373b5088c95a35ebbaac1098e6c47c8aab94771f3ffbf501
\ No newline at end of file
+de71ca09ead807e1a2ce9df0a5b23eb7690cb71fff51169a77e4c3992be53dda
\ No newline at end of file
diff --git a/src/Misc/contentHash/dotnetRuntime/osx-x64 b/src/Misc/contentHash/dotnetRuntime/osx-x64
index f593273294e..085b329b2a0 100644
--- a/src/Misc/contentHash/dotnetRuntime/osx-x64
+++ b/src/Misc/contentHash/dotnetRuntime/osx-x64
@@ -1 +1 @@
-4f8d48727d535daabcaec814e0dafb271c10625366c78e7e022ca7477a73023f
\ No newline at end of file
+d009e05e6b26d614d65be736a15d1bd151932121c16a9ff1b986deadecc982b9
\ No newline at end of file
diff --git a/src/Misc/contentHash/dotnetRuntime/win-arm64 b/src/Misc/contentHash/dotnetRuntime/win-arm64
index d050cb89ef2..5c84f556e8d 100644
--- a/src/Misc/contentHash/dotnetRuntime/win-arm64
+++ b/src/Misc/contentHash/dotnetRuntime/win-arm64
@@ -1 +1 @@
-d54d7428f2b9200a0030365a6a4e174e30a1b29b922f8254dffb2924bd09549d
\ No newline at end of file
+f730db39c2305800b4653795360ba9c10c68f384a46b85d808f1f9f0ed3c42e4
\ No newline at end of file
diff --git a/src/Misc/contentHash/dotnetRuntime/win-x64 b/src/Misc/contentHash/dotnetRuntime/win-x64
index 881293ccbd4..6be8253b146 100644
--- a/src/Misc/contentHash/dotnetRuntime/win-x64
+++ b/src/Misc/contentHash/dotnetRuntime/win-x64
@@ -1 +1 @@
-eaa939c45307f46b7003902255b3a2a09287215d710984107667e03ac493eb26
\ No newline at end of file
+a35b5722375490e9473cdcccb5e18b41eba3dbf4344fe31abc9821e21f18ea5a
\ No newline at end of file
diff --git a/src/Misc/externals.sh b/src/Misc/externals.sh
index 997d132b4ff..383221e4452 100755
--- a/src/Misc/externals.sh
+++ b/src/Misc/externals.sh
@@ -63,17 +63,16 @@ function acquireExternalTool() {
         echo "Curl version: $CURL_VERSION"
         # curl -f Fail silently (no output at all) on HTTP errors (H)
-        #      -k Allow connections to SSL sites without certs (H)
         #      -S Show error. With -s, make curl show errors when they occur
         #      -L Follow redirects (H)
         #      -o FILE Write to FILE instead of stdout
         #      --retry 3 Retries transient errors 3 times (timeouts, 5xx)
         if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
             # Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
-            curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
+            curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
         else
             # Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
-            curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
+            curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
         fi
         # Move the partial file to the download target.
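For context on the `src/Misc/externals.sh` hunk above: the script now downloads external tools without curl's insecure `-k` flag, so TLS certificates are always verified, and it only adds `--retry-all-errors` when the local curl is newer than 7.71.0, because older curl releases do not support that option. A minimal standalone sketch of that version-gated download pattern follows; the `download_with_retries` function name, the way the curl version is extracted, and the example URL are illustrative assumptions, not part of the repository.

```bash
#!/bin/bash
set -euo pipefail

# Sketch of the version-gated curl invocation used by externals.sh.
# Assumption: the curl version is parsed from `curl --version` here; the real
# script obtains CURL_VERSION elsewhere and also logs stderr to a *_download.log file.
download_with_retries() {
    local url="$1"
    local target="$2"
    local curl_version
    curl_version=$(curl --version | head -n1 | awk '{ print $2 }')

    if [[ "$(printf '%s\n' "7.71.0" "$curl_version" | sort -V | head -n1)" != "7.71.0" ]]; then
        # Older curl: --retry-all-errors is unavailable, rely on --retry alone.
        # -f fail on HTTP errors, -S show errors, -L follow redirects.
        curl -fSL --retry 3 -o "$target" "$url"
    else
        # Newer curl: also retry on errors that --retry alone would not cover.
        curl -fSL --retry 3 --retry-all-errors -o "$target" "$url"
    fi
}

# Example usage (hypothetical URL):
# download_with_retries "https://example.com/tool.tar.gz" "/tmp/tool.tar.gz"
```

Note that dropping `-k` means a runner behind a TLS-intercepting proxy has to trust the proxy's CA certificate rather than skipping certificate validation.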
diff --git a/src/Misc/runnercoreassets b/src/Misc/runnercoreassets deleted file mode 100644 index 34f873d5b63..00000000000 --- a/src/Misc/runnercoreassets +++ /dev/null @@ -1,57 +0,0 @@ -actions.runner.plist.template -actions.runner.service.template -checkScripts/downloadCert.js -checkScripts/makeWebRequest.js -darwin.svc.sh.template -hashFiles/index.js -installdependencies.sh -macos-run-invoker.js -Microsoft.IdentityModel.Logging.dll -Microsoft.IdentityModel.Tokens.dll -Minimatch.dll -Newtonsoft.Json.Bson.dll -Newtonsoft.Json.dll -Runner.Common.deps.json -Runner.Common.dll -Runner.Common.pdb -Runner.Listener -Runner.Listener.deps.json -Runner.Listener.dll -Runner.Listener.exe -Runner.Listener.pdb -Runner.Listener.runtimeconfig.json -Runner.PluginHost -Runner.PluginHost.deps.json -Runner.PluginHost.dll -Runner.PluginHost.exe -Runner.PluginHost.pdb -Runner.PluginHost.runtimeconfig.json -Runner.Plugins.deps.json -Runner.Plugins.dll -Runner.Plugins.pdb -Runner.Sdk.deps.json -Runner.Sdk.dll -Runner.Sdk.pdb -Runner.Worker -Runner.Worker.deps.json -Runner.Worker.dll -Runner.Worker.exe -Runner.Worker.pdb -Runner.Worker.runtimeconfig.json -RunnerService.exe -RunnerService.exe.config -RunnerService.js -RunnerService.pdb -runsvc.sh -Sdk.deps.json -Sdk.dll -Sdk.pdb -System.IdentityModel.Tokens.Jwt.dll -System.Net.Http.Formatting.dll -System.Security.Cryptography.Pkcs.dll -System.Security.Cryptography.ProtectedData.dll -System.ServiceProcess.ServiceController.dll -systemd.svc.sh.template -update.cmd.template -update.sh.template -YamlDotNet.dll \ No newline at end of file diff --git a/src/Misc/runnerdotnetruntimeassets b/src/Misc/runnerdotnetruntimeassets deleted file mode 100644 index 3d9d1ea0a57..00000000000 --- a/src/Misc/runnerdotnetruntimeassets +++ /dev/null @@ -1,270 +0,0 @@ -api-ms-win-core-console-l1-1-0.dll -api-ms-win-core-console-l1-2-0.dll -api-ms-win-core-datetime-l1-1-0.dll -api-ms-win-core-debug-l1-1-0.dll -api-ms-win-core-errorhandling-l1-1-0.dll -api-ms-win-core-fibers-l1-1-0.dll -api-ms-win-core-file-l1-1-0.dll -api-ms-win-core-file-l1-2-0.dll -api-ms-win-core-file-l2-1-0.dll -api-ms-win-core-handle-l1-1-0.dll -api-ms-win-core-heap-l1-1-0.dll -api-ms-win-core-interlocked-l1-1-0.dll -api-ms-win-core-libraryloader-l1-1-0.dll -api-ms-win-core-localization-l1-2-0.dll -api-ms-win-core-memory-l1-1-0.dll -api-ms-win-core-namedpipe-l1-1-0.dll -api-ms-win-core-processenvironment-l1-1-0.dll -api-ms-win-core-processthreads-l1-1-0.dll -api-ms-win-core-processthreads-l1-1-1.dll -api-ms-win-core-profile-l1-1-0.dll -api-ms-win-core-rtlsupport-l1-1-0.dll -api-ms-win-core-string-l1-1-0.dll -api-ms-win-core-synch-l1-1-0.dll -api-ms-win-core-synch-l1-2-0.dll -api-ms-win-core-sysinfo-l1-1-0.dll -api-ms-win-core-timezone-l1-1-0.dll -api-ms-win-core-util-l1-1-0.dll -api-ms-win-crt-conio-l1-1-0.dll -api-ms-win-crt-convert-l1-1-0.dll -api-ms-win-crt-environment-l1-1-0.dll -api-ms-win-crt-filesystem-l1-1-0.dll -api-ms-win-crt-heap-l1-1-0.dll -api-ms-win-crt-locale-l1-1-0.dll -api-ms-win-crt-math-l1-1-0.dll -api-ms-win-crt-multibyte-l1-1-0.dll -api-ms-win-crt-private-l1-1-0.dll -api-ms-win-crt-process-l1-1-0.dll -api-ms-win-crt-runtime-l1-1-0.dll -api-ms-win-crt-stdio-l1-1-0.dll -api-ms-win-crt-string-l1-1-0.dll -api-ms-win-crt-time-l1-1-0.dll -api-ms-win-crt-utility-l1-1-0.dll -clrcompression.dll -clretwrc.dll -clrjit.dll -coreclr.dll -createdump -createdump.exe -dbgshim.dll -hostfxr.dll -hostpolicy.dll -libclrjit.dylib -libclrjit.so -libcoreclr.dylib -libcoreclr.so -libcoreclrtraceptprovider.so 
-libdbgshim.dylib -libdbgshim.so -libhostfxr.dylib -libhostfxr.so -libhostpolicy.dylib -libhostpolicy.so -libmscordaccore.dylib -libmscordaccore.so -libmscordbi.dylib -libmscordbi.so -Microsoft.CSharp.dll -Microsoft.DiaSymReader.Native.amd64.dll -Microsoft.DiaSymReader.Native.arm64.dll -Microsoft.VisualBasic.Core.dll -Microsoft.VisualBasic.dll -Microsoft.Win32.Primitives.dll -Microsoft.Win32.Registry.dll -mscordaccore.dll -mscordaccore_amd64_amd64_6.0.522.21309.dll -mscordaccore_arm64_arm64_6.0.522.21309.dll -mscordaccore_amd64_amd64_6.0.1322.58009.dll -mscordaccore_amd64_amd64_6.0.2023.32017.dll -mscordaccore_amd64_amd64_6.0.2223.42425.dll -mscordaccore_amd64_amd64_6.0.2323.48002.dll -mscordbi.dll -mscorlib.dll -mscorrc.debug.dll -mscorrc.dll -msquic.dll -netstandard.dll -SOS_README.md -System.AppContext.dll -System.Buffers.dll -System.Collections.Concurrent.dll -System.Collections.dll -System.Collections.Immutable.dll -System.Collections.NonGeneric.dll -System.Collections.Specialized.dll -System.ComponentModel.Annotations.dll -System.ComponentModel.DataAnnotations.dll -System.ComponentModel.dll -System.ComponentModel.EventBasedAsync.dll -System.ComponentModel.Primitives.dll -System.ComponentModel.TypeConverter.dll -System.Configuration.dll -System.Console.dll -System.Core.dll -System.Data.Common.dll -System.Data.DataSetExtensions.dll -System.Data.dll -System.Diagnostics.Contracts.dll -System.Diagnostics.Debug.dll -System.Diagnostics.DiagnosticSource.dll -System.Diagnostics.FileVersionInfo.dll -System.Diagnostics.Process.dll -System.Diagnostics.StackTrace.dll -System.Diagnostics.TextWriterTraceListener.dll -System.Diagnostics.Tools.dll -System.Diagnostics.TraceSource.dll -System.Diagnostics.Tracing.dll -System.dll -System.Drawing.dll -System.Drawing.Primitives.dll -System.Dynamic.Runtime.dll -System.Formats.Asn1.dll -System.Globalization.Calendars.dll -System.Globalization.dll -System.Globalization.Extensions.dll -System.Globalization.Native.dylib -System.Globalization.Native.so -System.IO.Compression.Brotli.dll -System.IO.Compression.dll -System.IO.Compression.FileSystem.dll -System.IO.Compression.Native.a -System.IO.Compression.Native.dll -System.IO.Compression.Native.dylib -System.IO.Compression.Native.so -System.IO.Compression.ZipFile.dll -System.IO.dll -System.IO.FileSystem.AccessControl.dll -System.IO.FileSystem.dll -System.IO.FileSystem.DriveInfo.dll -System.IO.FileSystem.Primitives.dll -System.IO.FileSystem.Watcher.dll -System.IO.IsolatedStorage.dll -System.IO.MemoryMappedFiles.dll -System.IO.Pipes.AccessControl.dll -System.IO.Pipes.dll -System.IO.UnmanagedMemoryStream.dll -System.Linq.dll -System.Linq.Expressions.dll -System.Linq.Parallel.dll -System.Linq.Queryable.dll -System.Memory.dll -System.Native.a -System.Native.dylib -System.Native.so -System.Net.dll -System.Net.Http.dll -System.Net.Http.Json.dll -System.Net.Http.Native.a -System.Net.Http.Native.dylib -System.Net.Http.Native.so -System.Net.HttpListener.dll -System.Net.Mail.dll -System.Net.NameResolution.dll -System.Net.NetworkInformation.dll -System.Net.Ping.dll -System.Net.Primitives.dll -System.Net.Quic.dll -System.Net.Requests.dll -System.Net.Security.dll -System.Net.Security.Native.a -System.Net.Security.Native.dylib -System.Net.Security.Native.so -System.Net.ServicePoint.dll -System.Net.Sockets.dll -System.Net.WebClient.dll -System.Net.WebHeaderCollection.dll -System.Net.WebProxy.dll -System.Net.WebSockets.Client.dll -System.Net.WebSockets.dll -System.Numerics.dll -System.Numerics.Vectors.dll -System.ObjectModel.dll 
-System.Private.CoreLib.dll -System.Private.DataContractSerialization.dll -System.Private.Uri.dll -System.Private.Xml.dll -System.Private.Xml.Linq.dll -System.Reflection.DispatchProxy.dll -System.Reflection.dll -System.Reflection.Emit.dll -System.Reflection.Emit.ILGeneration.dll -System.Reflection.Emit.Lightweight.dll -System.Reflection.Extensions.dll -System.Reflection.Metadata.dll -System.Reflection.Primitives.dll -System.Reflection.TypeExtensions.dll -System.Resources.Reader.dll -System.Resources.ResourceManager.dll -System.Resources.Writer.dll -System.Runtime.CompilerServices.Unsafe.dll -System.Runtime.CompilerServices.VisualC.dll -System.Runtime.dll -System.Runtime.Extensions.dll -System.Runtime.Handles.dll -System.Runtime.InteropServices.dll -System.Runtime.InteropServices.RuntimeInformation.dll -System.Runtime.InteropServices.WindowsRuntime.dll -System.Runtime.Intrinsics.dll -System.Runtime.Loader.dll -System.Runtime.Numerics.dll -System.Runtime.Serialization.dll -System.Runtime.Serialization.Formatters.dll -System.Runtime.Serialization.Json.dll -System.Runtime.Serialization.Primitives.dll -System.Runtime.Serialization.Xml.dll -System.Runtime.WindowsRuntime.dll -System.Runtime.WindowsRuntime.UI.Xaml.dll -System.Security.AccessControl.dll -System.Security.Claims.dll -System.Security.Cryptography.Algorithms.dll -System.Security.Cryptography.Cng.dll -System.Security.Cryptography.Csp.dll -System.Security.Cryptography.Encoding.dll -System.Security.Cryptography.Native.Apple.a -System.Security.Cryptography.Native.Apple.dylib -System.Security.Cryptography.Native.OpenSsl.a -System.Security.Cryptography.Native.OpenSsl.dylib -System.Security.Cryptography.Native.OpenSsl.so -System.Security.Cryptography.OpenSsl.dll -System.Security.Cryptography.Primitives.dll -System.Security.Cryptography.X509Certificates.dll -System.Security.Cryptography.XCertificates.dll -System.Security.dll -System.Security.Principal.dll -System.Security.Principal.Windows.dll -System.Security.SecureString.dll -System.ServiceModel.Web.dll -System.ServiceProcess.dll -System.Text.Encoding.CodePages.dll -System.Text.Encoding.dll -System.Text.Encoding.Extensions.dll -System.Text.Encodings.Web.dll -System.Text.Json.dll -System.Text.RegularExpressions.dll -System.Threading.Channels.dll -System.Threading.dll -System.Threading.Overlapped.dll -System.Threading.Tasks.Dataflow.dll -System.Threading.Tasks.dll -System.Threading.Tasks.Extensions.dll -System.Threading.Tasks.Parallel.dll -System.Threading.Thread.dll -System.Threading.ThreadPool.dll -System.Threading.Timer.dll -System.Transactions.dll -System.Transactions.Local.dll -System.ValueTuple.dll -System.Web.dll -System.Web.HttpUtility.dll -System.Windows.dll -System.Xml.dll -System.Xml.Linq.dll -System.Xml.ReaderWriter.dll -System.Xml.Serialization.dll -System.Xml.XDocument.dll -System.Xml.XmlDocument.dll -System.Xml.XmlSerializer.dll -System.Xml.XPath.dll -System.Xml.XPath.XDocument.dll -ucrtbase.dll -WindowsBase.dll diff --git a/src/Misc/trimmedpackages_targz.json b/src/Misc/trimmedpackages_targz.json deleted file mode 100644 index 3a28b282d0b..00000000000 --- a/src/Misc/trimmedpackages_targz.json +++ /dev/null @@ -1,24 +0,0 @@ -[ - { - "HashValue": "", - "DownloadUrl": "https://github.com/actions/runner/releases/download/v/actions-runner---noruntime-noexternals.tar.gz", - "TrimmedContents": { - "dotnetRuntime": "", - "externals": "" - } - }, - { - "HashValue": "", - "DownloadUrl": "https://github.com/actions/runner/releases/download/v/actions-runner---noruntime.tar.gz", - 
"TrimmedContents": { - "dotnetRuntime": "" - } - }, - { - "HashValue": "", - "DownloadUrl": "https://github.com/actions/runner/releases/download/v/actions-runner---noexternals.tar.gz", - "TrimmedContents": { - "externals": "" - } - } -] \ No newline at end of file diff --git a/src/Misc/trimmedpackages_zip.json b/src/Misc/trimmedpackages_zip.json deleted file mode 100644 index 423bca46bf0..00000000000 --- a/src/Misc/trimmedpackages_zip.json +++ /dev/null @@ -1,24 +0,0 @@ -[ - { - "HashValue": "", - "DownloadUrl": "https://github.com/actions/runner/releases/download/v/actions-runner---noruntime-noexternals.zip", - "TrimmedContents": { - "dotnetRuntime": "", - "externals": "" - } - }, - { - "HashValue": "", - "DownloadUrl": "https://github.com/actions/runner/releases/download/v/actions-runner---noruntime.zip", - "TrimmedContents": { - "dotnetRuntime": "" - } - }, - { - "HashValue": "", - "DownloadUrl": "https://github.com/actions/runner/releases/download/v/actions-runner---noexternals.zip", - "TrimmedContents": { - "externals": "" - } - } -] \ No newline at end of file diff --git a/src/Runner.Common/BrokerServer.cs b/src/Runner.Common/BrokerServer.cs index 9d5287a2f35..5e1311715c5 100644 --- a/src/Runner.Common/BrokerServer.cs +++ b/src/Runner.Common/BrokerServer.cs @@ -17,7 +17,14 @@ public interface IBrokerServer : IRunnerService { Task ConnectAsync(Uri serverUrl, VssCredentials credentials); - Task GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate); + Task CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken); + Task DeleteSessionAsync(CancellationToken cancellationToken); + + Task GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token); + + Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials); + + Task ForceRefreshConnection(VssCredentials credentials); } public sealed class BrokerServer : RunnerService, IBrokerServer @@ -44,13 +51,53 @@ private void CheckConnection() } } - public Task GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate) + public async Task CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken) { CheckConnection(); - var jobMessage = RetryRequest( - async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken); + var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken); return jobMessage; } + + public Task GetRunnerMessageAsync(Guid? 
sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) + { + CheckConnection(); + var brokerSession = RetryRequest( + async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException); + + + return brokerSession; + } + + public async Task DeleteSessionAsync(CancellationToken cancellationToken) + { + CheckConnection(); + await _brokerHttpClient.DeleteSessionAsync(cancellationToken); + } + + public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials) + { + if (_brokerUri != serverUri || !_hasConnection) + { + return ConnectAsync(serverUri, credentials); + } + + return Task.CompletedTask; + } + + public Task ForceRefreshConnection(VssCredentials credentials) + { + return ConnectAsync(_brokerUri, credentials); + } + + public bool ShouldRetryException(Exception ex) + { + if (ex is AccessDeniedException ade && ade.ErrorCode == 1) + { + return false; + } + + return true; + } } } diff --git a/src/Runner.Common/HostContext.cs b/src/Runner.Common/HostContext.cs index 52cd3673438..78ea8ba4cbe 100644 --- a/src/Runner.Common/HostContext.cs +++ b/src/Runner.Common/HostContext.cs @@ -200,6 +200,10 @@ public HostContext(string hostType, string logFile = null) { _trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)"); } + else + { + _userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString)); + } if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"))) { diff --git a/src/Runner.Common/JobServerQueue.cs b/src/Runner.Common/JobServerQueue.cs index e6a00f1c823..c1425b80721 100644 --- a/src/Runner.Common/JobServerQueue.cs +++ b/src/Runner.Common/JobServerQueue.cs @@ -134,8 +134,8 @@ public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServi { liveConsoleFeedUrl = feedStreamUrl; } - - _resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken); + jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable); + _resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value)); _resultsClientInitiated = true; } @@ -551,6 +551,10 @@ private async Task ProcessResultsUploadQueueAsync(bool runOnce = false) { await UploadSummaryFile(file); } + if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase)) + { + await UploadResultsDiagnosticLogsFile(file); + } else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase)) { if (file.RecordId != _jobTimelineRecordId) @@ -922,6 +926,17 @@ private async Task UploadSummaryFile(ResultsUploadFileInfo file) await UploadResultsFile(file, summaryHandler); } + private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file) + { + Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}"); + ResultsFileUploadHandler diagnosticLogsHandler = async (file) => + { + await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None); + }; + + await UploadResultsFile(file, diagnosticLogsHandler); + } + private async Task 
UploadResultsStepLogFile(ResultsUploadFileInfo file) { Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}"); diff --git a/src/Runner.Common/ResultsServer.cs b/src/Runner.Common/ResultsServer.cs index ef97ebcfc65..8a1b35948c3 100644 --- a/src/Runner.Common/ResultsServer.cs +++ b/src/Runner.Common/ResultsServer.cs @@ -19,7 +19,7 @@ namespace GitHub.Runner.Common [ServiceLocator(Default = typeof(ResultServer))] public interface IResultsServer : IRunnerService, IAsyncDisposable { - void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token); + void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk); Task AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList lines, long? startLine, CancellationToken cancellationToken); @@ -35,6 +35,8 @@ Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool fin Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable records, CancellationToken cancellationToken); + + Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken); } public sealed class ResultServer : RunnerService, IResultsServer @@ -51,9 +53,9 @@ public sealed class ResultServer : RunnerService, IResultsServer private String _liveConsoleFeedUrl; private string _token; - public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token) + public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk) { - this._resultsClient = CreateHttpClient(uri, token); + this._resultsClient = CreateHttpClient(uri, token, useSdk); _token = token; if (!string.IsNullOrEmpty(liveConsoleFeedUrl)) @@ -63,7 +65,7 @@ public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string t } } - public ResultsHttpClient CreateHttpClient(Uri uri, string token) + public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk) { // Using default 100 timeout RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null); @@ -80,7 +82,7 @@ public ResultsHttpClient CreateHttpClient(Uri uri, string token) var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers); - return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true); + return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk); } public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file, @@ -141,6 +143,18 @@ public Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName throw new InvalidOperationException("Results client is not initialized."); } + public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, + CancellationToken cancellationToken) + { + if (_resultsClient != null) + { + return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file, + cancellationToken: cancellationToken); + } + + throw new InvalidOperationException("Results client is not initialized."); + } + public ValueTask DisposeAsync() { CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None); diff --git a/src/Runner.Common/RunServer.cs b/src/Runner.Common/RunServer.cs index fbd9ff96abd..c042796b124 100644 --- a/src/Runner.Common/RunServer.cs +++ b/src/Runner.Common/RunServer.cs @@ -5,6 +5,7 @@ using 
GitHub.Actions.RunService.WebApi; using GitHub.DistributedTask.Pipelines; using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; using GitHub.Runner.Sdk; using GitHub.Services.Common; using Sdk.RSWebApi.Contracts; @@ -60,7 +61,7 @@ public Task GetJobMessageAsync(string id, CancellationTo { CheckConnection(); return RetryRequest( - async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken, + async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, cancellationToken), cancellationToken, shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException); } diff --git a/src/Runner.Listener/BrokerMessageListener.cs b/src/Runner.Listener/BrokerMessageListener.cs index f3fd33fd61e..6767d0beb39 100644 --- a/src/Runner.Listener/BrokerMessageListener.cs +++ b/src/Runner.Listener/BrokerMessageListener.cs @@ -24,7 +24,15 @@ public sealed class BrokerMessageListener : RunnerService, IMessageListener private TimeSpan _getNextMessageRetryInterval; private TaskAgentStatus runnerStatus = TaskAgentStatus.Online; private CancellationTokenSource _getMessagesTokenSource; + private VssCredentials _creds; + private TaskAgentSession _session; private IBrokerServer _brokerServer; + private readonly Dictionary _sessionCreationExceptionTracker = new(); + private bool _accessTokenRevoked = false; + private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30); + private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4); + private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30); + public override void Initialize(IHostContext hostContext) { @@ -36,13 +44,134 @@ public override void Initialize(IHostContext hostContext) public async Task CreateSessionAsync(CancellationToken token) { - await RefreshBrokerConnection(); - return await Task.FromResult(true); + Trace.Entering(); + + // Settings + var configManager = HostContext.GetService(); + _settings = configManager.LoadSettings(); + var serverUrl = _settings.ServerUrlV2; + Trace.Info(_settings); + + if (string.IsNullOrEmpty(_settings.ServerUrlV2)) + { + throw new InvalidOperationException("ServerUrlV2 is not set"); + } + + // Create connection. + Trace.Info("Loading Credentials"); + var credMgr = HostContext.GetService(); + _creds = credMgr.LoadCredentials(); + + var agent = new TaskAgentReference + { + Id = _settings.AgentId, + Name = _settings.AgentName, + Version = BuildConstants.RunnerPackage.Version, + OSDescription = RuntimeInformation.OSDescription, + }; + string sessionName = $"{Environment.MachineName ?? 
"RUNNER"}"; + var taskAgentSession = new TaskAgentSession(sessionName, agent); + + string errorMessage = string.Empty; + bool encounteringError = false; + + while (true) + { + token.ThrowIfCancellationRequested(); + Trace.Info($"Attempt to create session."); + try + { + Trace.Info("Connecting to the Broker Server..."); + await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds); + Trace.Info("VssConnection created"); + + _term.WriteLine(); + _term.WriteSuccessMessage("Connected to GitHub"); + _term.WriteLine(); + + _session = await _brokerServer.CreateSessionAsync(taskAgentSession, token); + + Trace.Info($"Session created."); + if (encounteringError) + { + _term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected."); + _sessionCreationExceptionTracker.Clear(); + encounteringError = false; + } + + return true; + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + Trace.Info("Session creation has been cancelled."); + throw; + } + catch (TaskAgentAccessTokenExpiredException) + { + Trace.Info("Runner OAuth token has been revoked. Session creation failed."); + _accessTokenRevoked = true; + throw; + } + catch (Exception ex) + { + Trace.Error("Catch exception during create session."); + Trace.Error(ex); + + if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred) + { + // "invalid_client" means the runner registration has been deleted from the server. + if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase)) + { + _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); + return false; + } + + // Check whether we get 401 because the runner registration already removed by the service. + // If the runner registration get deleted, we can't exchange oauth token. + Trace.Error("Test oauth app registration."); + var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl)); + var authError = await oauthTokenProvider.ValidateCredentialAsync(token); + if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase)) + { + _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); + return false; + } + } + + if (!IsSessionCreationExceptionRetriable(ex)) + { + _term.WriteError($"Failed to create session. {ex.Message}"); + return false; + } + + if (!encounteringError) //print the message only on the first error + { + _term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected."); + encounteringError = true; + } + + Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds); + await HostContext.Delay(_sessionCreationRetryInterval, token); + } + } } public async Task DeleteSessionAsync() { - await Task.CompletedTask; + if (_session != null && _session.SessionId != Guid.Empty) + { + if (!_accessTokenRevoked) + { + using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + { + await _brokerServer.DeleteSessionAsync(ts.Token); + } + } + else + { + Trace.Warning("Runner OAuth token has been revoked. 
Skip deleting session."); + } + } } public void OnJobStatus(object sender, JobStatusEventArgs e) @@ -73,12 +202,13 @@ public async Task GetNextMessageAsync(CancellationToken token) _getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token); try { - message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, + message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId, runnerStatus, BuildConstants.RunnerPackage.Version, VarUtil.OS, VarUtil.OSArchitecture, - _settings.DisableUpdate); + _settings.DisableUpdate, + _getMessagesTokenSource.Token); if (message == null) { @@ -143,7 +273,7 @@ public async Task GetNextMessageAsync(CancellationToken token) } // re-create VssConnection before next retry - await RefreshBrokerConnection(); + await RefreshBrokerConnectionAsync(); Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds); await HostContext.Delay(_getNextMessageRetryInterval, token); @@ -173,6 +303,11 @@ public async Task GetNextMessageAsync(CancellationToken token) } } + public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken) + { + await RefreshBrokerConnectionAsync(); + } + public async Task DeleteMessageAsync(TaskAgentMessage message) { await Task.CompletedTask; @@ -196,12 +331,84 @@ ex is AccessDeniedException || } } - private async Task RefreshBrokerConnection() + private bool IsSessionCreationExceptionRetriable(Exception ex) + { + if (ex is TaskAgentNotFoundException) + { + Trace.Info("The runner no longer exists on the server. Stopping the runner."); + _term.WriteError("The runner no longer exists on the server. Please reconfigure the runner."); + return false; + } + else if (ex is TaskAgentSessionConflictException) + { + Trace.Info("The session for this runner already exists."); + _term.WriteError("A session for this runner already exists."); + if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException))) + { + _sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++; + if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds) + { + Trace.Info("The session conflict exception have reached retry limit."); + _term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds."); + return false; + } + } + else + { + _sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1; + } + + Trace.Info("The session conflict exception haven't reached retry limit."); + return true; + } + else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is")) + { + Trace.Info("Local clock might be skewed."); + _term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. 
Please sync your clock with your domain or internet time and try again."); + if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException))) + { + _sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++; + if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds) + { + Trace.Info("The OAuth token request exception has reached the retry limit."); + _term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds."); + return false; + } + } + else + { + _sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1; + } + + Trace.Info("The OAuth token request exception hasn't reached the retry limit."); + return true; + } + else if (ex is TaskAgentPoolNotFoundException || + ex is AccessDeniedException || + ex is VssUnauthorizedException) + { + Trace.Info($"Non-retriable exception: {ex.Message}"); + return false; + } + + else if (ex is InvalidOperationException) + { + Trace.Info($"Non-retriable exception: {ex.Message}"); + return false; + } + else + { + Trace.Info($"Retriable exception: {ex.Message}"); + return true; + } + } + + private async Task RefreshBrokerConnectionAsync() { var configManager = HostContext.GetService(); _settings = configManager.LoadSettings(); - if (_settings.ServerUrlV2 == null) + if (string.IsNullOrEmpty(_settings.ServerUrlV2)) { throw new InvalidOperationException("ServerUrlV2 is not set"); } diff --git a/src/Runner.Listener/Checks/ActionsCheck.cs b/src/Runner.Listener/Checks/ActionsCheck.cs index ac85d2559ce..4d4d5e42b09 100644 --- a/src/Runner.Listener/Checks/ActionsCheck.cs +++ b/src/Runner.Listener/Checks/ActionsCheck.cs @@ -39,6 +39,7 @@ public async Task RunCheck(string url, string pat) string githubApiUrl = null; string actionsTokenServiceUrl = null; string actionsPipelinesServiceUrl = null; + string resultsReceiverServiceUrl = null; var urlBuilder = new UriBuilder(url); if (UrlUtil.IsHostedServer(urlBuilder)) { @@ -47,6 +48,7 @@ public async Task RunCheck(string url, string pat) githubApiUrl = urlBuilder.Uri.AbsoluteUri; actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health"; actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health"; + resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health"; } else { @@ -56,13 +58,31 @@ public async Task RunCheck(string url, string pat) actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri; urlBuilder.Path = "_services/pipelines/_apis/health"; actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri; + resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
} + var codeLoadUrlBuilder = new UriBuilder(url); + codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}"; + codeLoadUrlBuilder.Path = "_ping"; + // check github api checkTasks.Add(CheckUtil.CheckDns(githubApiUrl)); checkTasks.Add(CheckUtil.CheckPing(githubApiUrl)); checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id")); + // check github codeload + checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri)); + checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri)); + checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id")); + + // check results-receiver service + if (!string.IsNullOrEmpty(resultsReceiverServiceUrl)) + { + checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl)); + checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl)); + checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id")); + } + // check actions token service checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl)); checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl)); diff --git a/src/Runner.Listener/JobDispatcher.cs b/src/Runner.Listener/JobDispatcher.cs index 512a4ee4188..ef664936ea8 100644 --- a/src/Runner.Listener/JobDispatcher.cs +++ b/src/Runner.Listener/JobDispatcher.cs @@ -35,7 +35,7 @@ public interface IJobDispatcher : IRunnerService // This implementation of IJobDispatcher is not thread safe. // It is based on the fact that the current design of the runner is a dequeue // and processes one message from the message queue at a time. - // In addition, it only executes one job every time, + // In addition, it only executes one job every time, // and the server will not send another job while this one is still running. public sealed class JobDispatcher : RunnerService, IJobDispatcher { @@ -546,13 +546,27 @@ await processChannel.SendAsync( Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result."); var jobServer = await InitializeJobServerAsync(systemConnection); - await LogWorkerProcessUnhandledException(jobServer, message, detailInfo); - - // Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space. - if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase)) + var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo }; + unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash; + switch (jobServer) { - Trace.Info($"Finish job with result 'Failed' due to IOException."); - await ForceFailJob(jobServer, message, detailInfo); + case IJobServer js: + { + await LogWorkerProcessUnhandledException(js, message, unhandledExceptionIssue); + // Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space. 
+ if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase)) + { + Trace.Info($"Finish job with result 'Failed' due to IOException."); + await ForceFailJob(js, message); + } + + break; + } + case IRunServer rs: + await ForceFailJob(rs, message, unhandledExceptionIssue); + break; + default: + throw new NotSupportedException($"JobServer type '{jobServer.GetType().Name}' is not supported."); } } @@ -629,8 +643,22 @@ await processChannel.SendAsync( Trace.Info("worker process has been killed."); } } + catch (Exception ex) + { + // message send failed, this might indicate worker process is already exited or stuck. + Trace.Info($"Job cancel message sending for job {message.JobId} failed, kill running worker. {ex}"); + workerProcessCancelTokenSource.Cancel(); + try + { + await workerProcessTask; + } + catch (OperationCanceledException) + { + Trace.Info("worker process has been killed."); + } + } - // wait worker to exit + // wait worker to exit // if worker doesn't exit within timeout, then kill worker. completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken)); @@ -1117,77 +1145,70 @@ private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequest } // log an error issue to job level timeline record - private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo) + private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, Issue issue) { - if (server is IJobServer jobServer) + try { - try - { - var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None); - ArgUtil.NotNull(timeline, nameof(timeline)); + var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None); + ArgUtil.NotNull(timeline, nameof(timeline)); - TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job"); - ArgUtil.NotNull(jobRecord, nameof(jobRecord)); + TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job"); + ArgUtil.NotNull(jobRecord, nameof(jobRecord)); - var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo }; - unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash; - jobRecord.ErrorCount++; - jobRecord.Issues.Add(unhandledExceptionIssue); - await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None); - } - catch (Exception ex) + jobRecord.ErrorCount++; + jobRecord.Issues.Add(issue); + + if (message.Variables.TryGetValue("DistributedTask.MarkJobAsFailedOnWorkerCrash", out var markJobAsFailedOnWorkerCrash) && + StringUtil.ConvertToBoolean(markJobAsFailedOnWorkerCrash?.Value)) { - Trace.Error("Fail to report unhandled exception from Runner.Worker process"); - Trace.Error(ex); + Trace.Info("Mark the job as failed since the worker crashed"); + jobRecord.Result = TaskResult.Failed; + // mark the job as completed so service will pickup the result + jobRecord.State = TimelineRecordState.Completed; } + + await 
jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None); } - else + catch (Exception ex) { - Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo); - return; + Trace.Error("Fail to report unhandled exception from Runner.Worker process"); + Trace.Error(ex); } } // raise job completed event to fail the job. - private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo) + private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message) { - if (server is IJobServer jobServer) + try { - try - { - var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed); - await jobServer.RaisePlanEventAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None); - } - catch (Exception ex) - { - Trace.Error("Fail to raise JobCompletedEvent back to service."); - Trace.Error(ex); - } + var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed); + await jobServer.RaisePlanEventAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None); } - else if (server is IRunServer runServer) + catch (Exception ex) { - try - { - var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo }; - var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation(); - var jobAnnotations = new List(); - if (unhandledAnnotation.HasValue) - { - jobAnnotations.Add(unhandledAnnotation.Value); - } + Trace.Error("Fail to raise JobCompletedEvent back to service."); + Trace.Error(ex); + } + } - await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None); - } - catch (Exception ex) + private async Task ForceFailJob(IRunServer runServer, Pipelines.AgentJobRequestMessage message, Issue issue) + { + try + { + var annotation = issue.ToAnnotation(); + var jobAnnotations = new List(); + if (annotation.HasValue) { - Trace.Error("Fail to raise job completion back to service."); - Trace.Error(ex); + jobAnnotations.Add(annotation.Value); } + + await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None); } - else + catch (Exception ex) { - throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported."); + Trace.Error("Fail to raise job completion back to service."); + Trace.Error(ex); } } diff --git a/src/Runner.Listener/MessageListener.cs b/src/Runner.Listener/MessageListener.cs index 113573b6aba..7be8b0bce84 100644 --- a/src/Runner.Listener/MessageListener.cs +++ b/src/Runner.Listener/MessageListener.cs @@ -14,6 +14,7 @@ using GitHub.Runner.Sdk; using GitHub.Services.Common; using GitHub.Services.OAuth; +using GitHub.Services.WebApi; namespace GitHub.Runner.Listener { @@ -24,6 +25,8 @@ public interface IMessageListener : IRunnerService Task DeleteSessionAsync(); Task GetNextMessageAsync(CancellationToken token); Task DeleteMessageAsync(TaskAgentMessage message); + + Task RefreshListenerTokenAsync(CancellationToken token); void OnJobStatus(object 
sender, JobStatusEventArgs e); } @@ -33,6 +36,7 @@ public sealed class MessageListener : RunnerService, IMessageListener private RunnerSettings _settings; private ITerminal _term; private IRunnerServer _runnerServer; + private IBrokerServer _brokerServer; private TaskAgentSession _session; private TimeSpan _getNextMessageRetryInterval; private bool _accessTokenRevoked = false; @@ -42,6 +46,9 @@ public sealed class MessageListener : RunnerService, IMessageListener private readonly Dictionary _sessionCreationExceptionTracker = new(); private TaskAgentStatus runnerStatus = TaskAgentStatus.Online; private CancellationTokenSource _getMessagesTokenSource; + private VssCredentials _creds; + + private bool _isBrokerSession = false; public override void Initialize(IHostContext hostContext) { @@ -49,6 +56,7 @@ public override void Initialize(IHostContext hostContext) _term = HostContext.GetService(); _runnerServer = HostContext.GetService(); + _brokerServer = hostContext.GetService(); } public async Task CreateSessionAsync(CancellationToken token) @@ -64,7 +72,7 @@ public async Task CreateSessionAsync(CancellationToken token) // Create connection. Trace.Info("Loading Credentials"); var credMgr = HostContext.GetService(); - VssCredentials creds = credMgr.LoadCredentials(); + _creds = credMgr.LoadCredentials(); var agent = new TaskAgentReference { @@ -86,7 +94,7 @@ public async Task CreateSessionAsync(CancellationToken token) try { Trace.Info("Connecting to the Runner Server..."); - await _runnerServer.ConnectAsync(new Uri(serverUrl), creds); + await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds); Trace.Info("VssConnection created"); _term.WriteLine(); @@ -98,6 +106,15 @@ public async Task CreateSessionAsync(CancellationToken token) taskAgentSession, token); + if (_session.BrokerMigrationMessage != null) + { + Trace.Info("Runner session is in migration mode: Creating Broker session with BrokerBaseUrl: {0}", _session.BrokerMigrationMessage.BrokerBaseUrl); + + await _brokerServer.UpdateConnectionIfNeeded(_session.BrokerMigrationMessage.BrokerBaseUrl, _creds); + _session = await _brokerServer.CreateSessionAsync(taskAgentSession, token); + _isBrokerSession = true; + } + Trace.Info($"Session created."); if (encounteringError) { @@ -124,7 +141,7 @@ public async Task CreateSessionAsync(CancellationToken token) Trace.Error("Catch exception during create session."); Trace.Error(ex); - if (ex is VssOAuthTokenRequestException vssOAuthEx && creds.Federated is VssOAuthCredential vssOAuthCred) + if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred) { // "invalid_client" means the runner registration has been deleted from the server. if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase)) @@ -171,6 +188,11 @@ public async Task DeleteSessionAsync() { using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) { + if (_isBrokerSession) + { + await _brokerServer.DeleteSessionAsync(ts.Token); + return; + } await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token); } } @@ -183,19 +205,17 @@ public async Task DeleteSessionAsync() public void OnJobStatus(object sender, JobStatusEventArgs e) { - if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW"))) + Trace.Info("Received job status event. JobState: {0}", e.Status); + runnerStatus = e.Status; + try { - Trace.Info("Received job status event. 
JobState: {0}", e.Status); - runnerStatus = e.Status; - try - { - _getMessagesTokenSource?.Cancel(); - } - catch (ObjectDisposedException) - { - Trace.Info("_getMessagesTokenSource is already disposed."); - } + _getMessagesTokenSource?.Cancel(); + } + catch (ObjectDisposedException) + { + Trace.Info("_getMessagesTokenSource is already disposed."); } + } public async Task GetNextMessageAsync(CancellationToken token) @@ -228,6 +248,23 @@ public async Task GetNextMessageAsync(CancellationToken token) // Decrypt the message body if the session is using encryption message = DecryptMessage(message); + + if (message != null && message.MessageType == BrokerMigrationMessage.MessageType) + { + Trace.Info("BrokerMigration message received. Polling Broker for messages..."); + + var migrationMessage = JsonUtility.FromString(message.Body); + + await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _creds); + message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId, + runnerStatus, + BuildConstants.RunnerPackage.Version, + VarUtil.OS, + VarUtil.OSArchitecture, + _settings.DisableUpdate, + token); + } + if (message != null) { _lastMessageId = message.MessageId; @@ -343,6 +380,12 @@ public async Task DeleteMessageAsync(TaskAgentMessage message) } } + public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken) + { + await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60)); + await _brokerServer.ForceRefreshConnection(_creds); + } + private TaskAgentMessage DecryptMessage(TaskAgentMessage message) { if (_session.EncryptionKey == null || diff --git a/src/Runner.Listener/Runner.Listener.csproj b/src/Runner.Listener/Runner.Listener.csproj index 0031b91dc5e..3cd72ec61f8 100644 --- a/src/Runner.Listener/Runner.Listener.csproj +++ b/src/Runner.Listener/Runner.Listener.csproj @@ -25,12 +25,6 @@ - - - GitHub.Runner.Listener.runnercoreassets - - - portable diff --git a/src/Runner.Listener/Runner.cs b/src/Runner.Listener/Runner.cs index 0e05e8a1518..f44db4cb548 100644 --- a/src/Runner.Listener/Runner.cs +++ b/src/Runner.Listener/Runner.cs @@ -596,6 +596,11 @@ await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, Trace.Info($"Service requests the hosted runner to shutdown. 
Reason: '{HostedRunnerShutdownMessage.Reason}'."); return Constants.Runner.ReturnCode.Success; } + else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh)) + { + Trace.Info("Received ForceTokenRefreshMessage"); + await _listener.RefreshListenerTokenAsync(messageQueueLoopTokenSource.Token); + } else { Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}."); @@ -634,6 +639,7 @@ await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, { try { + Trace.Info("Deleting Runner Session..."); await _listener.DeleteSessionAsync(); } catch (Exception ex) when (runOnce) diff --git a/src/Runner.Listener/SelfUpdater.cs b/src/Runner.Listener/SelfUpdater.cs index 3f97b2254d0..6ebeebd8270 100644 --- a/src/Runner.Listener/SelfUpdater.cs +++ b/src/Runner.Listener/SelfUpdater.cs @@ -6,13 +6,11 @@ using System.IO.Compression; using System.Linq; using System.Net.Http; -using System.Reflection; using System.Security.Cryptography; using System.Threading; using System.Threading.Tasks; using GitHub.DistributedTask.WebApi; using GitHub.Runner.Common; -using GitHub.Runner.Common.Util; using GitHub.Runner.Sdk; using GitHub.Services.Common; using GitHub.Services.WebApi; @@ -30,20 +28,14 @@ public class SelfUpdater : RunnerService, ISelfUpdater { private static string _packageType = "agent"; private static string _platform = BuildConstants.RunnerPackage.PackageName; - private static string _dotnetRuntime = "dotnetRuntime"; - private static string _externals = "externals"; - private readonly Dictionary _contentHashes = new(); private PackageMetadata _targetPackage; private ITerminal _terminal; private IRunnerServer _runnerServer; private int _poolId; private ulong _agentId; + private const int _numberOfOldVersionsToKeep = 1; private readonly ConcurrentQueue _updateTrace = new(); - private Task _cloneAndCalculateContentHashTask; - private string _dotnetRuntimeCloneDirectory; - private string _externalsCloneDirectory; - public bool Busy { get; private set; } public override void Initialize(IHostContext hostContext) @@ -56,8 +48,6 @@ public override void Initialize(IHostContext hostContext) var settings = configStore.GetSettings(); _poolId = settings.PoolId; _agentId = settings.AgentId; - _dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__"); - _externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__"); } public async Task SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token) @@ -67,13 +57,6 @@ public async Task SelfUpdate(AgentRefreshMessage updateMessage, IJobDispat { var totalUpdateTime = Stopwatch.StartNew(); - // Copy dotnet runtime and externals of current runner to a temp folder - // So we can re-use them with trimmed runner package, if possible. - // This process is best effort, if we can't use trimmed runner package, - // we will just go with the full package. 
- var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token); - _cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token); - if (!await UpdateNeeded(updateMessage.TargetVersion, token)) { Trace.Info($"Can't find available update package."); @@ -87,24 +70,6 @@ public async Task SelfUpdate(AgentRefreshMessage updateMessage, IJobDispat await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner."); await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner"); - if (_targetPackage.TrimmedPackages?.Count > 0) - { - // wait for cloning assets task to finish only if we have trimmed packages - await _cloneAndCalculateContentHashTask; - } - else - { - linkedTokenSource.Cancel(); - try - { - await _cloneAndCalculateContentHashTask; - } - catch (Exception ex) - { - Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}"); - } - } - await DownloadLatestRunner(token, updateMessage.TargetVersion); Trace.Info($"Download latest runner and unzip into runner root."); @@ -218,54 +183,8 @@ private async Task DownloadLatestRunner(CancellationToken token, string targetVe string archiveFile = null; var packageDownloadUrl = _targetPackage.DownloadUrl; var packageHashValue = _targetPackage.HashValue; - var runtimeTrimmed = false; - var externalsTrimmed = false; - var fallbackToFullPackage = false; - - // Only try trimmed package if sever sends them and we have calculated hash value of the current runtime/externals. - if (_contentHashes.Count == 2 && - _contentHashes.ContainsKey(_dotnetRuntime) && - _contentHashes.ContainsKey(_externals) && - _targetPackage.TrimmedPackages?.Count > 0) - { - Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}"); - Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}"); - // Try to see whether we can use any size trimmed down package to speed up runner updates. 
- foreach (var trimmedPackage in _targetPackage.TrimmedPackages) - { - if (trimmedPackage.TrimmedContents.Count == 2 && - trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) && - trimmedRuntimeHash == _contentHashes[_dotnetRuntime] && - trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) && - trimmedExternalsHash == _contentHashes[_externals]) - { - Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner."); - packageDownloadUrl = trimmedPackage.DownloadUrl; - packageHashValue = trimmedPackage.HashValue; - runtimeTrimmed = true; - externalsTrimmed = true; - break; - } - else if (trimmedPackage.TrimmedContents.Count == 1 && - trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) && - trimmedExternalsHash == _contentHashes[_externals]) - { - Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner."); - packageDownloadUrl = trimmedPackage.DownloadUrl; - packageHashValue = trimmedPackage.HashValue; - externalsTrimmed = true; - break; - } - else - { - Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch)."); - } - } - } _updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}"); - _updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}"); - _updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}"); try { @@ -323,12 +242,6 @@ private async Task DownloadLatestRunner(CancellationToken token, string targetVe await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token); } - catch (Exception ex) when (runtimeTrimmed || externalsTrimmed) - { - // if anything failed when we use trimmed package (download/validatehase/extract), try again with the full runner package. - Trace.Error($"Fail to download latest runner using trimmed package: {ex}"); - fallbackToFullPackage = true; - } finally { try @@ -347,74 +260,6 @@ private async Task DownloadLatestRunner(CancellationToken token, string targetVe } } - var trimmedPackageRestoreTasks = new List>(); - if (!fallbackToFullPackage) - { - // Skip restoring externals and runtime if we are going to fullback to the full package. - if (externalsTrimmed) - { - trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token)); - } - if (runtimeTrimmed) - { - trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token)); - } - } - - if (trimmedPackageRestoreTasks.Count > 0) - { - var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks); - if (restoreResults.Any(x => x == false)) - { - // if any of the restore failed, fallback to full package. 
- fallbackToFullPackage = true; - } - } - - if (fallbackToFullPackage) - { - Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates."); - _updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}"); - - IOUtil.DeleteDirectory(latestRunnerDirectory, token); - Directory.CreateDirectory(latestRunnerDirectory); - - packageDownloadUrl = _targetPackage.DownloadUrl; - packageHashValue = _targetPackage.HashValue; - _updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}"); - - try - { - archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token); - - if (string.IsNullOrEmpty(archiveFile)) - { - throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts"); - } - - await ValidateRunnerHash(archiveFile, packageHashValue); - - await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token); - } - finally - { - try - { - // delete .zip file - if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile)) - { - Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile); - IOUtil.DeleteFile(archiveFile); - } - } - catch (Exception ex) - { - //it is not critical if we fail to delete the .zip file - Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex); - } - } - } - await CopyLatestRunnerToRoot(latestRunnerDirectory, token); } @@ -665,9 +510,9 @@ private void DeletePreviousVersionRunnerBackup(CancellationToken token) // delete old bin.2.99.0 folder, only leave the current version and the latest download version var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*"); - if (allBinDirs.Length > 2) + if (allBinDirs.Length > _numberOfOldVersionsToKeep) { - // there are more than 2 bin.version folder. + // there are more than one bin.version folder. // delete older bin.version folders. foreach (var oldBinDir in allBinDirs) { @@ -694,9 +539,9 @@ private void DeletePreviousVersionRunnerBackup(CancellationToken token) // delete old externals.2.99.0 folder, only leave the current version and the latest download version var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*"); - if (allExternalsDirs.Length > 2) + if (allExternalsDirs.Length > _numberOfOldVersionsToKeep) { - // there are more than 2 externals.version folder. + // there are more than one externals.version folder. // delete older externals.version folders. foreach (var oldExternalDir in allExternalsDirs) { @@ -795,330 +640,5 @@ private async Task UpdateRunnerUpdateStateAsync(string currentState) Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update."); } } - - private async Task RestoreTrimmedExternals(string downloadDirectory, CancellationToken token) - { - // Copy the current runner's externals if we are using a externals trimmed package - // Execute the node.js to make sure the copied externals is working. - var stopWatch = Stopwatch.StartNew(); - try - { - Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}."); - IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token); - - // try run node.js to see if current node.js works fine after copy over to new location. 
- var nodeVersions = NodeUtil.BuiltInNodeVersions; - foreach (var nodeVersion in nodeVersions) - { - var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}"); - if (File.Exists(newNodeBinary)) - { - using (var p = HostContext.CreateService()) - { - var outputs = ""; - p.ErrorDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data)) - { - Trace.Error(data.Data); - } - }; - p.OutputDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data)) - { - Trace.Info(data.Data); - outputs = data.Data; - } - }; - var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token); - if (exitCode != 0) - { - Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}"); - return false; - } - - if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase)) - { - Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content."); - return false; - } - } - } - } - - return true; - } - catch (Exception ex) - { - Trace.Error($"Fail to restore externals for trimmed package: {ex}"); - return false; - } - finally - { - stopWatch.Stop(); - _updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms"); - } - } - - private async Task RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token) - { - // Copy the current runner's dotnet runtime if we are using a dotnet runtime trimmed package - // Execute the runner.listener to make sure the copied runtime is working. - var stopWatch = Stopwatch.StartNew(); - try - { - Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}."); - IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token); - - // try run the runner executable to see if current dotnet runtime + future runner binary works fine. 
- var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener"); - using (var p = HostContext.CreateService()) - { - p.ErrorDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data)) - { - Trace.Error(data.Data); - } - }; - p.OutputDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data)) - { - Trace.Info(data.Data); - } - }; - var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token); - if (exitCode != 0) - { - Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}"); - return false; - } - else - { - return true; - } - } - } - catch (Exception ex) - { - Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}"); - return false; - } - finally - { - stopWatch.Stop(); - _updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms"); - } - } - - private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token) - { - var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token); - var externalsCloneTask = CloneExternals(externalsCloneDirectory, token); - - var waitingTasks = new Dictionary() - { - {nameof(CloneDotnetRuntime), runtimeCloneTask}, - {nameof(CloneExternals),externalsCloneTask} - }; - - while (waitingTasks.Count > 0) - { - Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete."); - var complatedTask = await Task.WhenAny(waitingTasks.Values); - if (waitingTasks.ContainsKey(nameof(CloneExternals)) && - complatedTask == waitingTasks[nameof(CloneExternals)]) - { - Trace.Info($"Externals clone finished."); - waitingTasks.Remove(nameof(CloneExternals)); - try - { - if (await externalsCloneTask && !token.IsCancellationRequested) - { - var externalsHash = await HashFiles(externalsCloneDirectory, token); - Trace.Info($"Externals content hash: {externalsHash}"); - _contentHashes[_externals] = externalsHash; - _updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}"); - } - else - { - Trace.Error($"Skip compute hash since clone externals failed/cancelled."); - } - } - catch (Exception ex) - { - Trace.Error($"Fail to hash externals content: {ex}"); - } - } - else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) && - complatedTask == waitingTasks[nameof(CloneDotnetRuntime)]) - { - Trace.Info($"Dotnet runtime clone finished."); - waitingTasks.Remove(nameof(CloneDotnetRuntime)); - try - { - if (await runtimeCloneTask && !token.IsCancellationRequested) - { - var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token); - Trace.Info($"Runtime content hash: {runtimeHash}"); - _contentHashes[_dotnetRuntime] = runtimeHash; - _updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}"); - } - else - { - Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled."); - } - } - catch (Exception ex) - { - Trace.Error($"Fail to hash runtime content: {ex}"); - } - } - - Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete."); - } - } - - private async Task CloneDotnetRuntime(string runtimeDir, CancellationToken token) - { - var stopWatch = Stopwatch.StartNew(); - try - { - Trace.Info($"Cloning dotnet runtime to {runtimeDir}"); - IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None); - Directory.CreateDirectory(runtimeDir); - - var assembly = Assembly.GetExecutingAssembly(); - var assetsContent = default(string); - 
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets")) - using (var streamReader = new StreamReader(stream)) - { - assetsContent = await streamReader.ReadToEndAsync(); - } - - if (!string.IsNullOrEmpty(assetsContent)) - { - var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries); - if (runnerCoreAssets.Length > 0) - { - var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin); - IOUtil.CopyDirectory(binDir, runtimeDir, token); - - var clonedFile = 0; - foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories)) - { - token.ThrowIfCancellationRequested(); - if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim()))) - { - Trace.Verbose($"{file} is part of the runner core, delete from cloned runtime directory."); - IOUtil.DeleteFile(file); - } - else - { - clonedFile++; - } - } - - Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}"); - return true; - } - } - } - catch (Exception ex) - { - Trace.Error($"Fail to clone dotnet runtime to {runtimeDir}"); - Trace.Error(ex); - } - finally - { - stopWatch.Stop(); - _updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms"); - } - - return false; - } - - private Task CloneExternals(string externalsDir, CancellationToken token) - { - var stopWatch = Stopwatch.StartNew(); - try - { - Trace.Info($"Cloning externals to {externalsDir}"); - IOUtil.DeleteDirectory(externalsDir, CancellationToken.None); - Directory.CreateDirectory(externalsDir); - IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token); - Trace.Info($"Successfully cloned externals to {externalsDir}."); - return Task.FromResult(true); - } - catch (Exception ex) - { - Trace.Error($"Fail to clone externals to {externalsDir}"); - Trace.Error(ex); - } - finally - { - stopWatch.Stop(); - _updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms"); - } - - return Task.FromResult(false); - } - - private async Task HashFiles(string fileFolder, CancellationToken token) - { - Trace.Info($"Calculating hash for {fileFolder}"); - - var stopWatch = Stopwatch.StartNew(); - string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin); - string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); - string hashFilesScript = Path.Combine(binDir, "hashFiles"); - var hashResult = string.Empty; - - using (var processInvoker = HostContext.CreateService()) - { - processInvoker.ErrorDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__")) - { - hashResult = data.Data.Substring(10, data.Data.Length - 20); - Trace.Info($"Hash result: '{hashResult}'"); - } - else - { - Trace.Info(data.Data); - } - }; - - processInvoker.OutputDataReceived += (_, data) => - { - Trace.Verbose(data.Data); - }; - - var env = new Dictionary - { - ["patterns"] = "**" - }; - - int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder, - fileName: node, - arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"", - environment: env, - requireExitCodeZero: false, - outputEncoding: null, - killProcessOnCancel: true, - cancellationToken: token); - - if (exitCode != 0) - { - Trace.Error($"hashFiles returns 
'{exitCode}' failed. Fail to hash files under directory '{fileFolder}'"); - } - - stopWatch.Stop(); - _updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms"); - return hashResult; - } - } } } diff --git a/src/Runner.Listener/SelfUpdaterV2.cs b/src/Runner.Listener/SelfUpdaterV2.cs index 46bced18c08..b64619b69e5 100644 --- a/src/Runner.Listener/SelfUpdaterV2.cs +++ b/src/Runner.Listener/SelfUpdaterV2.cs @@ -149,7 +149,6 @@ private async Task DownloadLatestRunner(CancellationToken token, string targetVe string archiveFile = null; - // Only try trimmed package if sever sends them and we have calculated hash value of the current runtime/externals. _updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}"); try diff --git a/src/Runner.Sdk/Util/VssUtil.cs b/src/Runner.Sdk/Util/VssUtil.cs index bf74fa16cac..f35b0c21dd1 100644 --- a/src/Runner.Sdk/Util/VssUtil.cs +++ b/src/Runner.Sdk/Util/VssUtil.cs @@ -23,7 +23,13 @@ public static void InitializeVssClientSettings(List addi if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0) { - headerValues.AddRange(VssClientHttpRequestSettings.Default.UserAgent); + foreach (var headerVal in VssClientHttpRequestSettings.Default.UserAgent) + { + if (!headerValues.Contains(headerVal)) + { + headerValues.Add(headerVal); + } + } } VssClientHttpRequestSettings.Default.UserAgent = headerValues; @@ -33,6 +39,23 @@ public static void InitializeVssClientSettings(List addi { VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator; } + + var rawHeaderValues = new List(); + rawHeaderValues.AddRange(additionalUserAgents); + rawHeaderValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})")); + + if (RawClientHttpRequestSettings.Default.UserAgent != null && RawClientHttpRequestSettings.Default.UserAgent.Count > 0) + { + foreach (var headerVal in RawClientHttpRequestSettings.Default.UserAgent) + { + if (!rawHeaderValues.Contains(headerVal)) + { + rawHeaderValues.Add(headerVal); + } + } + } + + RawClientHttpRequestSettings.Default.UserAgent = rawHeaderValues; } public static VssConnection CreateConnection( @@ -62,11 +85,6 @@ public static VssConnection CreateConnection( settings.SendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(httpRequestTimeoutSeconds, 100), 1200)); } - if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW"))) - { - settings.AllowAutoRedirectForBroker = true; - } - // Remove Invariant from the list of accepted languages. 
// // The constructor of VssHttpRequestSettings (base class of VssClientHttpRequestSettings) adds the current diff --git a/src/Runner.Worker/ActionManifestManager.cs b/src/Runner.Worker/ActionManifestManager.cs index 6b46e6a4e92..351c2427daf 100644 --- a/src/Runner.Worker/ActionManifestManager.cs +++ b/src/Runner.Worker/ActionManifestManager.cs @@ -144,7 +144,7 @@ public ActionDefinitionData Load(IExecutionContext executionContext, string mani executionContext.Error(error.Message); } - throw new ArgumentException($"Fail to load {fileRelativePath}"); + throw new ArgumentException($"Failed to load {fileRelativePath}"); } if (actionDefinition.Execution == null) diff --git a/src/Runner.Worker/ContainerOperationProvider.cs b/src/Runner.Worker/ContainerOperationProvider.cs index 75475f7f776..eda7cbfb15a 100644 --- a/src/Runner.Worker/ContainerOperationProvider.cs +++ b/src/Runner.Worker/ContainerOperationProvider.cs @@ -488,17 +488,39 @@ private async Task ContainerRegistryLogin(IExecutionContext executionCon { throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}"); } - var loginExitCode = await _dockerManager.DockerLogin( - executionContext, - configLocation, - container.RegistryServer, - container.RegistryAuthUsername, - container.RegistryAuthPassword); - if (loginExitCode != 0) + // Login docker with retry up to 3 times + int retryCount = 0; + int loginExitCode = 0; + while (retryCount < 3) + { + loginExitCode = await _dockerManager.DockerLogin( + executionContext, + configLocation, + container.RegistryServer, + container.RegistryAuthUsername, + container.RegistryAuthPassword); + if (loginExitCode == 0) + { + break; + } + else + { + retryCount++; + if (retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + executionContext.Warning($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + if (retryCount == 3 && loginExitCode != 0) { throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}"); } + return configLocation; } diff --git a/src/Runner.Worker/DiagnosticLogManager.cs b/src/Runner.Worker/DiagnosticLogManager.cs index 7259bab26bd..261689b5f3f 100644 --- a/src/Runner.Worker/DiagnosticLogManager.cs +++ b/src/Runner.Worker/DiagnosticLogManager.cs @@ -108,6 +108,8 @@ public void UploadDiagnosticLogs(IExecutionContext executionContext, parentContext.QueueAttachFile(type: CoreAttachmentType.DiagnosticLog, name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath); + parentContext.QueueDiagnosticLogFile(name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath); + executionContext.Debug("Diagnostic file upload complete."); } diff --git a/src/Runner.Worker/ExecutionContext.cs b/src/Runner.Worker/ExecutionContext.cs index 9ed9faeb024..77c145d1ddc 100644 --- a/src/Runner.Worker/ExecutionContext.cs +++ b/src/Runner.Worker/ExecutionContext.cs @@ -90,6 +90,7 @@ public interface IExecutionContext : IRunnerService long Write(string tag, string message); void QueueAttachFile(string type, string name, string filePath); void QueueSummaryFile(string name, string filePath, Guid stepRecordId); + void QueueDiagnosticLogFile(string name, string filePath); // timeline record update methods void Start(string currentOperation = null); @@ -397,11 +398,11 @@ public IExecutionContext 
CreateChild( if (recordOrder != null) { - child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder); + child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder, embedded: isEmbedded); } else { - child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder); + child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder, embedded: isEmbedded); } if (logger != null) { @@ -432,7 +433,7 @@ public IExecutionContext CreateEmbeddedChild( Dictionary intraActionState = null, string siblingScopeName = null) { - return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout()); + return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout(), recordOrder: _record.Order); } public void Start(string currentOperation = null) @@ -982,6 +983,18 @@ public void QueueSummaryFile(string name, string filePath, Guid stepRecordId) _jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0); } + public void QueueDiagnosticLogFile(string name, string filePath) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + ArgUtil.NotNullOrEmpty(filePath, nameof(filePath)); + + if (!File.Exists(filePath)) + { + throw new FileNotFoundException($"Can't upload diagnostic log file: {filePath}. File does not exist."); + } + _jobServerQueue.QueueResultsUpload(_record.Id, name, filePath, CoreAttachmentType.ResultsDiagnosticLog, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0); + } + // Add OnMatcherChanged public void Add(OnMatcherChanged handler) { @@ -1160,7 +1173,7 @@ public void WriteWebhookPayload() } } - private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order) + private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order, bool embedded = false) { _mainTimelineId = timelineId; _record.Id = timelineRecordId; @@ -1186,7 +1199,11 @@ private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Gu var configuration = HostContext.GetService(); _record.WorkerName = configuration.GetSettings().AgentName; - _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + // We don't want to update the timeline record for embedded steps since they are not really represented in the UI. 
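+            // (Embedded children are created via CreateEmbeddedChild and now reuse the parent's record id and order,
+            // so there is no separate timeline entry to publish for them.)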
+ if (!embedded) + { + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } } private void JobServerQueueThrottling_EventReceived(object sender, ThrottlingEventArgs data) diff --git a/src/Runner.Worker/FileCommandManager.cs b/src/Runner.Worker/FileCommandManager.cs index b03c3189003..0021aa527a3 100644 --- a/src/Runner.Worker/FileCommandManager.cs +++ b/src/Runner.Worker/FileCommandManager.cs @@ -244,7 +244,7 @@ public void ProcessCommand(IExecutionContext context, string filePath, Container if (resultsReceiverEndpoint != null) { Trace.Info($"Queueing results file ({filePath}) for attachment upload ({attachmentName})"); - var stepId = context.Id; + var stepId = context.IsEmbedded ? context.EmbeddedId : context.Id; // Attachments must be added to the parent context (job), not the current context (step) context.Root.QueueSummaryFile(attachmentName, scrubbedFilePath, stepId); } diff --git a/src/Runner.Worker/JobExtension.cs b/src/Runner.Worker/JobExtension.cs index c010fa908d9..c420ddfc88f 100644 --- a/src/Runner.Worker/JobExtension.cs +++ b/src/Runner.Worker/JobExtension.cs @@ -392,6 +392,18 @@ public async Task> InitializeJob(IExecutionContext jobContext, Pipel } } + // Register custom image creation post-job step if the "snapshot" token is present in the message. + var snapshotRequest = templateEvaluator.EvaluateJobSnapshotRequest(message.Snapshot, jobContext.ExpressionValues, jobContext.ExpressionFunctions); + if (snapshotRequest != null) + { + var snapshotOperationProvider = HostContext.GetService(); + jobContext.RegisterPostJobStep(new JobExtensionRunner( + runAsync: (executionContext, _) => snapshotOperationProvider.CreateSnapshotRequestAsync(executionContext, snapshotRequest), + condition: $"{PipelineTemplateConstants.Success}()", + displayName: $"Create custom image", + data: null)); + } + // Register Job Completed hook if the variable is set var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED"); if (!string.IsNullOrEmpty(completedHookPath)) diff --git a/src/Runner.Worker/JobRunner.cs b/src/Runner.Worker/JobRunner.cs index dd6a8ac5506..ec4250cd5b9 100644 --- a/src/Runner.Worker/JobRunner.cs +++ b/src/Runner.Worker/JobRunner.cs @@ -49,6 +49,9 @@ public async Task RunAsync(AgentJobRequestMessage message, Cancellat !string.IsNullOrEmpty(orchestrationId.Value)) { HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value)); + + // make sure orchestration id is in the user-agent header. + VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy); } var jobServerQueueTelemetry = false; @@ -301,6 +304,8 @@ private async Task CompleteJobAsync(IRunServer runServer, IExecution jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions)); } + await ShutdownQueue(throwOnFailure: false); + // Make sure to clean temp after file upload since they may be pending fileupload still use the TEMP dir. 
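+            // (The ShutdownQueue call above lets any pending uploads finish before the TEMP directory is removed.)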
_tempDirectoryManager?.CleanupTempDirectory(); diff --git a/src/Runner.Worker/SnapshotOperationProvider.cs b/src/Runner.Worker/SnapshotOperationProvider.cs new file mode 100644 index 00000000000..73630d4985c --- /dev/null +++ b/src/Runner.Worker/SnapshotOperationProvider.cs @@ -0,0 +1,32 @@ +#nullable enable +using System.IO; +using System.Threading.Tasks; +using GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +namespace GitHub.Runner.Worker; + +[ServiceLocator(Default = typeof(SnapshotOperationProvider))] +public interface ISnapshotOperationProvider : IRunnerService +{ + Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest); +} + +public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvider +{ + public Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest) + { + var snapshotRequestFilePath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json"); + var snapshotRequestDirectoryPath = Path.GetDirectoryName(snapshotRequestFilePath); + if (snapshotRequestDirectoryPath != null) + { + Directory.CreateDirectory(snapshotRequestDirectoryPath); + } + + IOUtil.SaveObject(snapshotRequest, snapshotRequestFilePath); + executionContext.Output($"Request written to: {snapshotRequestFilePath}"); + executionContext.Output("This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job."); + executionContext.Output("If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner."); + return Task.CompletedTask; + } +} diff --git a/src/Runner.Worker/StepsRunner.cs b/src/Runner.Worker/StepsRunner.cs index 4c88726daf9..83ce87f6480 100644 --- a/src/Runner.Worker/StepsRunner.cs +++ b/src/Runner.Worker/StepsRunner.cs @@ -295,7 +295,7 @@ private async Task RunStepAsync(IStep step, CancellationToken jobCancellationTok !jobCancellationToken.IsCancellationRequested) { Trace.Error($"Caught timeout exception from step: {ex.Message}"); - step.ExecutionContext.Error("The action has timed out."); + step.ExecutionContext.Error($"The action '{step.DisplayName}' has timed out after {timeoutMinutes} minutes."); step.ExecutionContext.Result = TaskResult.Failed; } else diff --git a/src/Sdk/Common/Common/RawHttpHeaders.cs b/src/Sdk/Common/Common/RawHttpHeaders.cs index f4f6beabe1c..55f0caaf9b1 100644 --- a/src/Sdk/Common/Common/RawHttpHeaders.cs +++ b/src/Sdk/Common/Common/RawHttpHeaders.cs @@ -1,12 +1,11 @@ using System; using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; namespace GitHub.Services.Common.Internal { [EditorBrowsable(EditorBrowsableState.Never)] public static class RawHttpHeaders { - public const String SessionHeader = "X-Runner-Session"; + public const String SessionHeader = "X-Actions-Session"; } } diff --git a/src/Sdk/Common/Common/RawHttpMessageHandler.cs b/src/Sdk/Common/Common/RawHttpMessageHandler.cs index 09cd2d59a60..316bcd576ff 100644 --- a/src/Sdk/Common/Common/RawHttpMessageHandler.cs +++ b/src/Sdk/Common/Common/RawHttpMessageHandler.cs @@ -138,6 +138,8 @@ protected override async Task SendAsync( response.Dispose(); } + this.Settings.ApplyTo(request); + // Let's start with sending a token IssuedToken token = null; if (m_tokenProvider != null) diff --git a/src/Sdk/Common/Common/VssHttpMessageHandler.cs 
b/src/Sdk/Common/Common/VssHttpMessageHandler.cs index 04a318d63e4..f48eec41af8 100644 --- a/src/Sdk/Common/Common/VssHttpMessageHandler.cs +++ b/src/Sdk/Common/Common/VssHttpMessageHandler.cs @@ -214,25 +214,7 @@ protected override async Task SendAsync( // ConfigureAwait(false) enables the continuation to be run outside any captured // SyncronizationContext (such as ASP.NET's) which keeps things from deadlocking... - var tmpResponse = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); - if (Settings.AllowAutoRedirectForBroker && tmpResponse.StatusCode == HttpStatusCode.Redirect) - { - //Dispose of the previous response - tmpResponse?.Dispose(); - - var location = tmpResponse.Headers.Location; - request = new HttpRequestMessage(HttpMethod.Get, location); - - // Reapply the token to new redirected request - ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth); - - // Resend the request - response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); - } - else - { - response = tmpResponse; - } + response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); traceInfo?.TraceRequestSendTime(); diff --git a/src/Sdk/Common/Common/VssHttpRequestSettings.cs b/src/Sdk/Common/Common/VssHttpRequestSettings.cs index 6d67f9274ee..7279d28093f 100644 --- a/src/Sdk/Common/Common/VssHttpRequestSettings.cs +++ b/src/Sdk/Common/Common/VssHttpRequestSettings.cs @@ -110,16 +110,6 @@ public Boolean AllowAutoRedirect set; } - /// - /// Gets or sets a value indicating whether or not HttpClientHandler should follow redirect on outgoing broker requests - /// This is special since this also sends token in the request, where as default AllowAutoRedirect does not - /// - public Boolean AllowAutoRedirectForBroker - { - get; - set; - } - /// /// Gets or sets a value indicating whether or not compression should be used on outgoing requests. /// The default value is true. diff --git a/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs index 3f93e75e53c..070d86ee24f 100644 --- a/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs +++ b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs @@ -43,6 +43,7 @@ public AgentJobRequestMessage( TemplateToken jobOutputs, IList defaults, ActionsEnvironmentReference actionsEnvironment, + TemplateToken snapshot, String messageType = JobRequestMessageTypes.PipelineAgentJobRequest) { this.MessageType = messageType; @@ -57,6 +58,7 @@ public AgentJobRequestMessage( this.Workspace = workspaceOptions; this.JobOutputs = jobOutputs; this.ActionsEnvironment = actionsEnvironment; + this.Snapshot = snapshot; m_variables = new Dictionary(variables, StringComparer.OrdinalIgnoreCase); m_maskHints = new List(maskHints); m_steps = new List(steps); @@ -237,6 +239,13 @@ public ActionsEnvironmentReference ActionsEnvironment set; } + [DataMember(EmitDefaultValue = false)] + public TemplateToken Snapshot + { + get; + set; + } + /// /// Gets the collection of variables associated with the current context. 
/// diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs index e9fb75dfa1d..a7e90fce334 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs @@ -29,6 +29,7 @@ public sealed class PipelineTemplateConstants public const String Id = "id"; public const String If = "if"; public const String Image = "image"; + public const String ImageName = "image-name"; public const String Include = "include"; public const String Inputs = "inputs"; public const String Job = "job"; @@ -60,6 +61,7 @@ public sealed class PipelineTemplateConstants public const String Services = "services"; public const String Shell = "shell"; public const String Skipped = "skipped"; + public const String Snapshot = "snapshot"; public const String StepEnv = "step-env"; public const String StepIfResult = "step-if-result"; public const String StepWith = "step-with"; diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs index 506a7d2689c..9d2c0bdca7a 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs @@ -346,6 +346,39 @@ internal static List> ConvertToJobServiceCont return result; } + internal static Snapshot ConvertToJobSnapshotRequest(TemplateContext context, TemplateToken token) + { + string imageName = null; + if (token is StringToken snapshotStringLiteral) + { + imageName = snapshotStringLiteral.Value; + } + else + { + var snapshotMapping = token.AssertMapping($"{PipelineTemplateConstants.Snapshot}"); + foreach (var snapshotPropertyPair in snapshotMapping) + { + var propertyName = snapshotPropertyPair.Key.AssertString($"{PipelineTemplateConstants.Snapshot} key"); + switch (propertyName.Value) + { + case PipelineTemplateConstants.ImageName: + imageName = snapshotPropertyPair.Value.AssertString($"{PipelineTemplateConstants.Snapshot} {propertyName}").Value; + break; + default: + propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Snapshot} key"); + break; + } + } + } + + if (String.IsNullOrEmpty(imageName)) + { + return null; + } + + return new Snapshot(imageName); + } + private static ActionStep ConvertToStep( TemplateContext context, TemplateToken stepsItem, diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs index 331b102469b..e5fbd5d2888 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs @@ -370,6 +370,32 @@ public IList> EvaluateJobServiceContainers( return result; } + public Snapshot EvaluateJobSnapshotRequest(TemplateToken token, + DictionaryContextData contextData, + IList expressionFunctions) + { + var result = default(Snapshot); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData, expressionFunctions); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Snapshot, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobSnapshotRequest(context, token); + } + catch (Exception ex) when (!(ex is 
TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result; + } + private TemplateContext CreateContext( DictionaryContextData contextData, IList expressionFunctions, diff --git a/src/Sdk/DTPipelines/Pipelines/Snapshot.cs b/src/Sdk/DTPipelines/Pipelines/Snapshot.cs new file mode 100644 index 00000000000..60f8da04f4f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Snapshot.cs @@ -0,0 +1,17 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + public class Snapshot + { + public Snapshot(string imageName) + { + ImageName = imageName; + } + + [DataMember(EmitDefaultValue = false)] + public String ImageName { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/workflow-v1.0.json b/src/Sdk/DTPipelines/workflow-v1.0.json index c1453f4e6ac..a3837edff02 100644 --- a/src/Sdk/DTPipelines/workflow-v1.0.json +++ b/src/Sdk/DTPipelines/workflow-v1.0.json @@ -71,7 +71,8 @@ "env": "job-env", "outputs": "job-outputs", "defaults": "job-defaults", - "steps": "steps" + "steps": "steps", + "snapshot": "snapshot" } } }, @@ -155,6 +156,24 @@ } }, + "snapshot": { + "one-of": [ + "non-empty-string", + "snapshot-mapping" + ] + }, + + "snapshot-mapping": { + "mapping": { + "properties": { + "image-name": { + "type": "non-empty-string", + "required": true + } + } + } + }, + "runs-on": { "context": [ "github", diff --git a/src/Sdk/DTWebApi/WebApi/BrokerMigrationMessage.cs b/src/Sdk/DTWebApi/WebApi/BrokerMigrationMessage.cs new file mode 100644 index 00000000000..eebfa4c7dcb --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/BrokerMigrationMessage.cs @@ -0,0 +1,38 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Message that tells the runner to redirect itself to BrokerListener for messages. + /// (Note that we use a special Message instead of a simple 302. This is because + /// the runner will need to apply the runner's token to the request, and it is + /// a security best practice to *not* blindly add sensitive data to redirects + /// 302s.) 
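+    /// When the runner receives this message it creates a session against BrokerBaseUrl and polls
+    /// that endpoint for further messages instead of the legacy service.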
+ /// + [DataContract] + public class BrokerMigrationMessage + { + public static readonly string MessageType = "BrokerMigration"; + + public BrokerMigrationMessage() + { + } + + public BrokerMigrationMessage( + Uri brokerUrl) + { + this.BrokerBaseUrl = brokerUrl; + } + + /// + /// The base url for the broker listener + /// + [DataMember] + public Uri BrokerBaseUrl + { + get; + internal set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs new file mode 100644 index 00000000000..5c8f95ed7cd --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs @@ -0,0 +1,10 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + public sealed class TaskAgentMessageTypes + { + public static readonly string ForceTokenRefresh = "ForceTokenRefresh"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs index 8135de83b51..7b9fdfd0a1f 100644 --- a/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs @@ -75,5 +75,12 @@ public bool UseFipsEncryption get; set; } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public BrokerMigrationMessage BrokerMigrationMessage + { + get; + set; + } } } diff --git a/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs b/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs index 572b1f6426b..e1aa07417de 100644 --- a/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs +++ b/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs @@ -101,6 +101,7 @@ public class CoreAttachmentType public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment"; public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog"; public static readonly String ResultsLog = "Results.Core.Log"; + public static readonly String ResultsDiagnosticLog = "Results.Core.DiagnosticLog"; } [GenerateAllConstants] diff --git a/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs b/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs index c010f820879..c075d318e07 100644 --- a/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs +++ b/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs @@ -7,5 +7,8 @@ public class AcquireJobRequest { [DataMember(Name = "jobMessageId", EmitDefaultValue = false)] public string JobMessageId { get; set; } + + [DataMember(Name = "runnerOS", EmitDefaultValue = false)] + public string RunnerOS { get; set; } } } diff --git a/src/Sdk/RSWebApi/RunServiceHttpClient.cs b/src/Sdk/RSWebApi/RunServiceHttpClient.cs index bafcee5aabd..4d2b74f8c4c 100644 --- a/src/Sdk/RSWebApi/RunServiceHttpClient.cs +++ b/src/Sdk/RSWebApi/RunServiceHttpClient.cs @@ -59,12 +59,14 @@ public RunServiceHttpClient( public async Task GetJobMessageAsync( Uri requestUri, string messageId, + string runnerOS, CancellationToken cancellationToken = default) { HttpMethod httpMethod = new HttpMethod("POST"); var payload = new AcquireJobRequest { JobMessageId = messageId, + RunnerOS = runnerOS }; requestUri = new Uri(requestUri, "acquirejob"); diff --git a/src/Sdk/Sdk.csproj b/src/Sdk/Sdk.csproj index dbd96f33616..ff1cb85a4fe 100644 --- a/src/Sdk/Sdk.csproj +++ b/src/Sdk/Sdk.csproj @@ -13,6 +13,7 @@ + diff --git a/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs b/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs index fa914561c9c..e9ad938fb9f 100644 --- a/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs +++ b/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs @@ -57,6 +57,7 @@ public BrokerHttpClient( } public async Task GetRunnerMessageAsync( + 
Guid? sessionId, string runnerVersion, TaskAgentStatus? status, string os = null, @@ -69,6 +70,11 @@ public async Task GetRunnerMessageAsync( List> queryParams = new List>(); + if (sessionId != null) + { + queryParams.Add("sessionId", sessionId.Value.ToString()); + } + if (status != null) { queryParams.Add("status", status.Value.ToString()); @@ -104,12 +110,67 @@ public async Task GetRunnerMessageAsync( return result.Value; } + // the only time we throw a `Forbidden` exception from Listener /messages is when the runner is + // disable_update and is too old to poll if (result.StatusCode == HttpStatusCode.Forbidden) { - throw new AccessDeniedException(result.Error); + throw new AccessDeniedException($"{result.Error} Runner version v{runnerVersion} is deprecated and cannot receive messages.") + { + ErrorCode = 1 + }; } throw new Exception($"Failed to get job message: {result.Error}"); } + + public async Task CreateSessionAsync( + + TaskAgentSession session, + CancellationToken cancellationToken = default) + { + var requestUri = new Uri(Client.BaseAddress, "session"); + var requestContent = new ObjectContent(session, new VssJsonMediaTypeFormatter(true)); + + var result = await SendAsync( + new HttpMethod("POST"), + requestUri: requestUri, + content: requestContent, + cancellationToken: cancellationToken); + + if (result.IsSuccess) + { + return result.Value; + } + + if (result.StatusCode == HttpStatusCode.Forbidden) + { + throw new AccessDeniedException(result.Error); + } + + if (result.StatusCode == HttpStatusCode.Conflict) + { + throw new TaskAgentSessionConflictException(result.Error); + } + + throw new Exception($"Failed to create broker session: {result.Error}"); + } + + public async Task DeleteSessionAsync( + CancellationToken cancellationToken = default) + { + var requestUri = new Uri(Client.BaseAddress, $"session"); + + var result = await SendAsync( + new HttpMethod("DELETE"), + requestUri: requestUri, + cancellationToken: cancellationToken); + + if (result.IsSuccess) + { + return; + } + + throw new Exception($"Failed to delete broker session: {result.Error}"); + } } } diff --git a/src/Sdk/WebApi/WebApi/Contracts.cs b/src/Sdk/WebApi/WebApi/Contracts.cs index 95279e8ec30..0018062ea58 100644 --- a/src/Sdk/WebApi/WebApi/Contracts.cs +++ b/src/Sdk/WebApi/WebApi/Contracts.cs @@ -89,6 +89,26 @@ public class GetSignedStepLogsURLResponse public long SoftSizeLimit; } + [DataContract] + [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))] + public class GetSignedDiagnosticLogsURLRequest + { + [DataMember] + public string WorkflowJobRunBackendId; + [DataMember] + public string WorkflowRunBackendId; + } + + [DataContract] + [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))] + public class GetSignedDiagnosticLogsURLResponse + { + [DataMember] + public string DiagLogsURL; + [DataMember] + public string BlobStorageType; + } + [DataContract] [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))] public class JobLogsMetadataCreate diff --git a/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs b/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs index 3bdc8cc0289..d206306f59f 100644 --- a/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs +++ b/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using System.Net.Http; @@ -8,8 +7,11 @@ using System.Threading; using System.Threading.Tasks; using System.Net.Http.Formatting; +using Azure; +using Azure.Storage.Blobs; +using 
Azure.Storage.Blobs.Models; +using Azure.Storage.Blobs.Specialized; using GitHub.DistributedTask.WebApi; -using GitHub.Services.Common; using GitHub.Services.Results.Contracts; using Sdk.WebApi.WebApi; @@ -21,13 +23,15 @@ public ResultsHttpClient( Uri baseUrl, HttpMessageHandler pipeline, string token, - bool disposeHandler) + bool disposeHandler, + bool useSdk) : base(baseUrl, pipeline, disposeHandler) { m_token = token; m_resultsServiceUrl = baseUrl; m_formatter = new JsonMediaTypeFormatter(); m_changeIdCounter = 1; + m_useSdk = useSdk; } // Get Sas URL calls @@ -77,6 +81,19 @@ private async Task GetStepLogUploadUrlAsync(string return await GetResultsSignedURLResponse(getStepLogsSignedBlobURLEndpoint, cancellationToken, request); } + private async Task GetDiagnosticLogsUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken) + { + var request = new GetSignedDiagnosticLogsURLRequest() + { + WorkflowJobRunBackendId = jobId, + WorkflowRunBackendId = planId, + }; + + var getDiagnosticLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobDiagLogsSignedBlobURL); + + return await GetResultsSignedURLResponse(getDiagnosticLogsSignedBlobURLEndpoint, cancellationToken, request); + } + private async Task GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken) { var request = new GetSignedJobLogsURLRequest() @@ -91,7 +108,6 @@ private async Task GetJobLogUploadUrlAsync(string p } // Create metadata calls - private async Task SendRequest(Uri uri, CancellationToken cancellationToken, R request, string timestamp) { using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri)) @@ -161,73 +177,219 @@ private async Task JobLogUploadCompleteAsync(string planId, string jobId, long l await SendRequest(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp); } - private async Task UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken) + private (Uri path, string sas) ParseSasToken(string url) { - // Upload the file to the url - var request = new HttpRequestMessage(HttpMethod.Put, url) + if (String.IsNullOrEmpty(url)) { - Content = new StreamContent(file) - }; - - if (blobStorageType == BlobStorageTypes.AzureBlobStorage) - { - request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob); + throw new Exception($"SAS url is empty"); } - using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken)) + var blobUri = new UriBuilder(url); + var sasUrl = blobUri.Query.Substring(1); //remove starting "?" 
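+            // keep the SAS token separate so it can be passed to the blob client as an AzureSasCredential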
+ blobUri.Query = null; // remove query params + return (blobUri.Uri, sasUrl); + } + + private BlobClient GetBlobClient(string url) + { + var blobUri = ParseSasToken(url); + + var opts = new BlobClientOptions { - if (!response.IsSuccessStatusCode) + Retry = { - throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}"); + MaxRetries = Constants.DefaultBlobUploadRetries, + NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds) } - return response; - } + }; + + return new BlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts); } - private async Task CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken) + private AppendBlobClient GetAppendBlobClient(string url) { - var request = new HttpRequestMessage(HttpMethod.Put, url) + var blobUri = ParseSasToken(url); + + var opts = new BlobClientOptions { - Content = new StringContent("") + Retry = + { + MaxRetries = Constants.DefaultBlobUploadRetries, + NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds) + } }; - if (blobStorageType == BlobStorageTypes.AzureBlobStorage) + + return new AppendBlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts); + } + + private async Task UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken, Dictionary customHeaders = null) + { + if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage) { - request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob); - request.Content.Headers.Add("Content-Length", "0"); + var blobClient = GetBlobClient(url); + var httpHeaders = new BlobHttpHeaders(); + if (customHeaders != null) + { + foreach (var header in customHeaders) + { + switch (header.Key) + { + case Constants.ContentTypeHeader: + httpHeaders.ContentType = header.Value; + break; + } + } + } + try + { + await blobClient.UploadAsync(file, new BlobUploadOptions() + { + HttpHeaders = httpHeaders, + Conditions = new BlobRequestConditions + { + IfNoneMatch = new ETag("*") + } + }, cancellationToken); + } + catch (RequestFailedException e) + { + throw new Exception($"Failed to upload block to Azure blob: {e.Message}"); + } } - - using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken)) + else { - if (!response.IsSuccessStatusCode) + // Upload the file to the url + var request = new HttpRequestMessage(HttpMethod.Put, url) + { + Content = new StreamContent(file) + }; + + if (blobStorageType == BlobStorageTypes.AzureBlobStorage) { - throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}"); + request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob); + } + + if (customHeaders != null) + { + foreach (var header in customHeaders) + { + request.Content.Headers.Add(header.Key, header.Value); + } + }; + + using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken)) + { + if (!response.IsSuccessStatusCode) + { + throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}"); + } } - return response; } } - private async Task UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken) + private async Task 
CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken, Dictionary customHeaders = null) { - var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock"; - // Upload the file to the url - var request = new HttpRequestMessage(HttpMethod.Put, url + comp) + if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage) { - Content = new StreamContent(file) - }; - - if (blobStorageType == BlobStorageTypes.AzureBlobStorage) + var appendBlobClient = GetAppendBlobClient(url); + var httpHeaders = new BlobHttpHeaders(); + if (customHeaders != null) + { + foreach (var header in customHeaders) + { + switch (header.Key) + { + case Constants.ContentTypeHeader: + httpHeaders.ContentType = header.Value; + break; + } + } + } + try + { + await appendBlobClient.CreateAsync(new AppendBlobCreateOptions() + { + HttpHeaders = httpHeaders, + Conditions = new AppendBlobRequestConditions + { + IfNoneMatch = new ETag("*") + } + }, cancellationToken: cancellationToken); + } + catch (RequestFailedException e) + { + throw new Exception($"Failed to create append blob in Azure blob: {e.Message}"); + } + } + else { - request.Content.Headers.Add("Content-Length", fileSize.ToString()); - request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString()); + var request = new HttpRequestMessage(HttpMethod.Put, url) + { + Content = new StringContent("") + }; + if (blobStorageType == BlobStorageTypes.AzureBlobStorage) + { + request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob); + request.Content.Headers.Add("Content-Length", "0"); + } + if (customHeaders != null) + { + foreach (var header in customHeaders) + { + request.Content.Headers.Add(header.Key, header.Value); + } + }; + + using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken)) + { + if (!response.IsSuccessStatusCode) + { + throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}"); + } + } } + } - using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken)) + private async Task UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken) + { + if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage) { - if (!response.IsSuccessStatusCode) + var appendBlobClient = GetAppendBlobClient(url); + try + { + await appendBlobClient.AppendBlockAsync(file, cancellationToken: cancellationToken); + if (finalize) + { + await appendBlobClient.SealAsync(cancellationToken: cancellationToken); + } + } + catch (RequestFailedException e) { - throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}"); + throw new Exception($"Failed to upload append block in Azure blob: {e.Message}"); + } + } + else + { + var comp = finalize ? 
"&comp=appendblock&seal=true" : "&comp=appendblock"; + // Upload the file to the url + var request = new HttpRequestMessage(HttpMethod.Put, url + comp) + { + Content = new StreamContent(file) + }; + + if (blobStorageType == BlobStorageTypes.AzureBlobStorage) + { + request.Content.Headers.Add("Content-Length", fileSize.ToString()); + request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString()); + } + + using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken)) + { + if (!response.IsSuccessStatusCode) + { + throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}"); + } } - return response; } } @@ -251,23 +413,22 @@ public async Task UploadStepSummaryAsync(string planId, string jobId, Guid stepI // Upload the file using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true)) { - var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken); + await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken); } // Send step summary upload complete message await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken); } - private async Task UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType, - CancellationToken cancellationToken) + private async Task UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType, + CancellationToken cancellationToken, Dictionary customHeaders = null) { - HttpResponseMessage response; if (firstBlock && finalize) { // This is the one and only block, just use a block blob using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true)) { - response = await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken); + await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken, customHeaders); } } else @@ -276,18 +437,16 @@ private async Task UploadLogFile(string file, bool finalize // Create the Append blob if (firstBlock) { - await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken); + await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken, customHeaders); } // Upload content var fileSize = new FileInfo(file).Length; using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true)) { - response = await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken); + await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken); } } - - return response; } // Handle file upload for step log @@ -300,7 +459,12 @@ public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid st throw new Exception("Failed to get step log upload url"); } - await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken); + var customHeaders = new Dictionary + { + { Constants.ContentTypeHeader, Constants.TextPlainContentType } + }; + + await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken, customHeaders); // Update metadata if (finalize) @@ 
-320,7 +484,12 @@ public async Task UploadResultsJobLogAsync(string planId, string jobId, string f throw new Exception("Failed to get job log upload url"); } - await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken); + var customHeaders = new Dictionary + { + { Constants.ContentTypeHeader, Constants.TextPlainContentType } + }; + + await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken, customHeaders); // Update metadata if (finalize) @@ -330,6 +499,18 @@ public async Task UploadResultsJobLogAsync(string planId, string jobId, string f } } + public async Task UploadResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken) + { + // Get the upload url + var uploadUrlResponse = await GetDiagnosticLogsUploadUrlAsync(planId, jobId, cancellationToken); + if (uploadUrlResponse == null || uploadUrlResponse.DiagLogsURL == null) + { + throw new Exception("Failed to get diagnostic logs upload url"); + } + + await UploadLogFile(file, true, true, uploadUrlResponse.DiagLogsURL, uploadUrlResponse.BlobStorageType, cancellationToken); + } + private Step ConvertTimelineRecordToStep(TimelineRecord r) { return new Step() @@ -405,6 +586,7 @@ public async Task UpdateWorkflowStepsAsync(Guid planId, IEnumerable _config; + private readonly Mock _brokerServer; + private readonly Mock _credMgr; + private Mock _store; + + + public BrokerMessageListenerL0() + { + _settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work", ServerUrlV2 = "http://myserverv2" }; + _config = new Mock(); + _config.Setup(x => x.LoadSettings()).Returns(_settings); + _credMgr = new Mock(); + _store = new Mock(); + _brokerServer = new Mock(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async void CreatesSession() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); + _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + + // Act. + BrokerMessageListener listener = new(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. 
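+            // The listener reports success and creates exactly one session against the broker.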
+ Assert.True(result); + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + } + } + + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") + { + TestHostContext tc = new(this, testName); + tc.SetSingleton(_config.Object); + tc.SetSingleton(_credMgr.Object); + tc.SetSingleton(_store.Object); + tc.SetSingleton(_brokerServer.Object); + return tc; + } + } +} diff --git a/src/Test/L0/Listener/JobDispatcherL0.cs b/src/Test/L0/Listener/JobDispatcherL0.cs index 9057e6bbf6a..4d3f258c86c 100644 --- a/src/Test/L0/Listener/JobDispatcherL0.cs +++ b/src/Test/L0/Listener/JobDispatcherL0.cs @@ -41,7 +41,7 @@ private Pipelines.AgentJobRequestMessage CreateJobRequestMessage() TaskOrchestrationPlanReference plan = new(); TimelineReference timeline = null; Guid jobId = Guid.NewGuid(); - var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); result.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); return result; } @@ -806,7 +806,8 @@ private static AgentJobRequestMessage GetAgentJobRequestMessage() }, null, new List(), - new ActionsEnvironmentReference("env") + new ActionsEnvironmentReference("env"), + null ); return message; } diff --git a/src/Test/L0/Listener/MessageListenerL0.cs b/src/Test/L0/Listener/MessageListenerL0.cs index 7cd6035e19b..57a1f60d800 100644 --- a/src/Test/L0/Listener/MessageListenerL0.cs +++ b/src/Test/L0/Listener/MessageListenerL0.cs @@ -24,6 +24,8 @@ public sealed class MessageListenerL0 private Mock _credMgr; private Mock _store; + private Mock _brokerServer; + public MessageListenerL0() { _settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work" }; @@ -32,6 +34,7 @@ public MessageListenerL0() _runnerServer = new Mock(); _credMgr = new Mock(); _store = new Mock(); + _brokerServer = new Mock(); } private TestHostContext CreateTestContext([CallerMemberName] String testName = "") @@ -41,6 +44,7 @@ private TestHostContext CreateTestContext([CallerMemberName] String testName = " tc.SetSingleton(_runnerServer.Object); tc.SetSingleton(_credMgr.Object); tc.SetSingleton(_store.Object); + tc.SetSingleton(_brokerServer.Object); return tc; } @@ -81,6 +85,72 @@ public async void CreatesSession() _settings.PoolId, It.Is(y => y != null), tokenSource.Token), Times.Once()); + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Never()); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async void CreatesSessionWithBrokerMigration() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. 
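+            // The legacy service returns a session carrying a BrokerMigrationMessage, so the listener
+            // is expected to create a second session against the broker endpoint as well.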
+ var expectedSession = new TaskAgentSession() + { + OwnerName = "legacy", + BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com")) + }; + + var expectedBrokerSession = new TaskAgentSession() + { + OwnerName = "broker" + }; + + _runnerServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedBrokerSession)); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); + _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + + // Act. + MessageListener listener = new(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. + Assert.True(result); + + _runnerServer + .Verify(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Once()); } } @@ -131,6 +201,83 @@ public async void DeleteSession() } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async void DeleteSessionWithBrokerMigration() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession() + { + OwnerName = "legacy", + BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com")) + }; + + var expectedBrokerSession = new TaskAgentSession() + { + SessionId = Guid.NewGuid(), + OwnerName = "broker" + }; + + _runnerServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedBrokerSession)); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); + _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + + // Act. + MessageListener listener = new(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + Assert.True(result); + + _runnerServer + .Verify(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + + _brokerServer + .Setup(x => x.DeleteSessionAsync(It.IsAny())) + .Returns(Task.CompletedTask); + + // Act. 
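+                // Since the session was created through broker migration, deleting it should go to the broker server rather than the legacy runner server.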
+ await listener.DeleteSessionAsync(); + + + //Assert + _runnerServer + .Verify(x => x.DeleteAgentSessionAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny()), Times.Never()); + _brokerServer + .Verify(x => x.DeleteSessionAsync(It.IsAny()), Times.Once()); + } + } + [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] @@ -212,6 +359,112 @@ public async void GetNextMessage() } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async void GetNextMessageWithBrokerMigration() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(expectedSession, Guid.NewGuid()); + + _runnerServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); + _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + + // Act. + MessageListener listener = new(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.True(result); + + var brokerMigrationMesage = new BrokerMigrationMessage(new Uri("https://actions.broker.com")); + + var arMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = JsonUtility.ToString(brokerMigrationMesage), + MessageType = BrokerMigrationMessage.MessageType + }, + }; + + var brokerMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = "somebody1", + MessageId = 4234, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + }, + new TaskAgentMessage + { + Body = "somebody2", + MessageId = 4235, + MessageType = JobCancelMessage.MessageType + }, + null, //should be skipped by GetNextMessageAsync implementation + null, + new TaskAgentMessage + { + Body = "somebody3", + MessageId = 4236, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + } + }; + var brokerMessageQueue = new Queue(brokerMessages); + + _runnerServer + .Setup(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(async (Int32 poolId, Guid sessionId, Int64? 
lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) => + { + await Task.Yield(); + return arMessages[0]; // always send migration message + }); + + _brokerServer + .Setup(x => x.GetRunnerMessageAsync( + expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(async (Guid sessionId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) => + { + await Task.Yield(); + return brokerMessageQueue.Dequeue(); + }); + + TaskAgentMessage message1 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message2 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message3 = await listener.GetNextMessageAsync(tokenSource.Token); + Assert.Equal(brokerMessages[0], message1); + Assert.Equal(brokerMessages[1], message2); + Assert.Equal(brokerMessages[4], message3); + + //Assert + _runnerServer + .Verify(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length)); + + _brokerServer + .Verify(x => x.GetRunnerMessageAsync( + expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length)); + } + } + [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] diff --git a/src/Test/L0/Listener/RunnerL0.cs b/src/Test/L0/Listener/RunnerL0.cs index 47df4de25e3..9c57f2adc36 100644 --- a/src/Test/L0/Listener/RunnerL0.cs +++ b/src/Test/L0/Listener/RunnerL0.cs @@ -42,7 +42,7 @@ private Pipelines.AgentJobRequestMessage CreateJobRequestMessage(string jobName) TaskOrchestrationPlanReference plan = new(); TimelineReference timeline = null; Guid jobId = Guid.NewGuid(); - return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); } private JobCancelMessage CreateJobCancelMessage() diff --git a/src/Test/L0/Listener/SelfUpdaterL0.cs b/src/Test/L0/Listener/SelfUpdaterL0.cs index ffa26d917a4..ed3dd5dfb4f 100644 --- a/src/Test/L0/Listener/SelfUpdaterL0.cs +++ b/src/Test/L0/Listener/SelfUpdaterL0.cs @@ -24,7 +24,6 @@ public sealed class SelfUpdaterL0 private Mock _configStore; private Mock _jobDispatcher; private AgentRefreshMessage _refreshMessage = new(1, "2.999.0"); - private List _trimmedPackages = new(); #if !OS_WINDOWS private string _packageUrl = null; @@ -72,12 +71,6 @@ private async Task FetchLatestRunner() } } - using (var client = new HttpClient()) - { - var json = await client.GetStringAsync($"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}-trimmedpackages.json"); - _trimmedPackages = StringUtil.ConvertFromJson>(json); - } - _runnerServer.Setup(x => x.GetPackageAsync("agent", 
BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny())) .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl })); @@ -92,12 +85,10 @@ public async void TestSelfUpdateAsync() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdater(); @@ -153,12 +144,10 @@ public async void TestSelfUpdateAsync_NoUpdateOnOldVersion() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdater(); @@ -206,12 +195,10 @@ public async void TestSelfUpdateAsync_DownloadRetry() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdater(); @@ -261,12 +248,10 @@ public async void TestSelfUpdateAsync_ValidateHash() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdater(); @@ -306,495 +291,6 @@ public async void TestSelfUpdateAsync_ValidateHash() Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); } } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void TestSelfUpdateAsync_CloneHash_RuntimeAndExternals() - { - try - { - await FetchLatestRunner(); - Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); - using (var hc = new TestHostContext(this)) - { - hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); - - //Arrange - var updater = new Runner.Listener.SelfUpdater(); - hc.SetSingleton(_term.Object); - hc.SetSingleton(_runnerServer.Object); - hc.SetSingleton(_configStore.Object); - hc.SetSingleton(new HttpClientHandlerFactory()); - - var p1 = new ProcessInvokerWrapper(); - p1.Initialize(hc); - var p2 = new ProcessInvokerWrapper(); - p2.Initialize(hc); - var p3 = new ProcessInvokerWrapper(); - p3.Initialize(hc); - hc.EnqueueInstance(p1); - hc.EnqueueInstance(p2); - hc.EnqueueInstance(p3); - updater.Initialize(hc); - - _runnerServer.Setup(x => 
x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny())) - .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = new List() { new TrimmedPackageMetadata() } })); - - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => - { - hc.GetTrace().Info(t); - }) - .Returns(Task.FromResult(new TaskAgent())); - - try - { - var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken); - Assert.True(result); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"))); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"))); - - FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); - Assert.NotNull(contentHashesProperty); - Dictionary contentHashes = (Dictionary)contentHashesProperty.GetValue(updater); - hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes)); - - var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}"); - var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}"); - - Assert.Equal(File.ReadAllText(dotnetRuntimeHashFile).Trim(), contentHashes["dotnetRuntime"]); - Assert.Equal(File.ReadAllText(externalsHashFile).Trim(), contentHashes["externals"]); - } - finally - { - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None); - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None); - } - } - } - finally - { - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void TestSelfUpdateAsync_Cancel_CloneHashTask_WhenNotNeeded() - { - try - { - await FetchLatestRunner(); - Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); - using (var hc = new TestHostContext(this)) - { - hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); - - //Arrange - var updater = new Runner.Listener.SelfUpdater(); - hc.SetSingleton(_term.Object); - hc.SetSingleton(_runnerServer.Object); - hc.SetSingleton(_configStore.Object); - hc.SetSingleton(new Mock().Object); - - var p1 = new ProcessInvokerWrapper(); - p1.Initialize(hc); - var p2 = new ProcessInvokerWrapper(); - p2.Initialize(hc); - var p3 = new ProcessInvokerWrapper(); - p3.Initialize(hc); - hc.EnqueueInstance(p1); - hc.EnqueueInstance(p2); - hc.EnqueueInstance(p3); - updater.Initialize(hc); - - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => - { - hc.GetTrace().Info(t); - }) - .Returns(Task.FromResult(new TaskAgent())); - - try - { - var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken); - - FieldInfo contentHashesProperty = 
updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); - Assert.NotNull(contentHashesProperty); - Dictionary contentHashes = (Dictionary)contentHashesProperty.GetValue(updater); - hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes)); - - Assert.NotEqual(2, contentHashes.Count); - } - catch (Exception ex) - { - hc.GetTrace().Error(ex); - } - } - } - finally - { - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void TestSelfUpdateAsync_UseExternalsTrimmedPackage() - { - try - { - await FetchLatestRunner(); - Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); - using (var hc = new TestHostContext(this)) - { - hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); - - //Arrange - var updater = new Runner.Listener.SelfUpdater(); - hc.SetSingleton(_term.Object); - hc.SetSingleton(_runnerServer.Object); - hc.SetSingleton(_configStore.Object); - hc.SetSingleton(new HttpClientHandlerFactory()); - - var p1 = new ProcessInvokerWrapper(); // hashfiles - p1.Initialize(hc); - var p2 = new ProcessInvokerWrapper(); // hashfiles - p2.Initialize(hc); - var p3 = new ProcessInvokerWrapper(); // un-tar - p3.Initialize(hc); - var p4 = new ProcessInvokerWrapper(); // node -v - p4.Initialize(hc); - var p5 = new ProcessInvokerWrapper(); // node -v - p5.Initialize(hc); - hc.EnqueueInstance(p1); - hc.EnqueueInstance(p2); - hc.EnqueueInstance(p3); - hc.EnqueueInstance(p4); - hc.EnqueueInstance(p5); - updater.Initialize(hc); - - var trim = _trimmedPackages.Where(x => !x.TrimmedContents.ContainsKey("dotnetRuntime")).ToList(); - _runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny())) - .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim })); - - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => - { - hc.GetTrace().Info(t); - }) - .Returns(Task.FromResult(new TaskAgent())); - - try - { - var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken); - Assert.True(result); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"))); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"))); - } - finally - { - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None); - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None); - } - - var traceFile = Path.GetTempFileName(); - File.Copy(hc.TraceFileName, traceFile, true); - - var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}"); - var externalsHash = await File.ReadAllTextAsync(externalsHashFile); - - if (externalsHash == trim[0].TrimmedContents["externals"]) - { - Assert.Contains("Use trimmed (externals) package", File.ReadAllText(traceFile)); - } - else - { - 
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile)); - } - } - } - finally - { - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void TestSelfUpdateAsync_UseExternalsRuntimeTrimmedPackage() - { - try - { - await FetchLatestRunner(); - Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); - using (var hc = new TestHostContext(this)) - { - hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); - - //Arrange - var updater = new Runner.Listener.SelfUpdater(); - hc.SetSingleton(_term.Object); - hc.SetSingleton(_runnerServer.Object); - hc.SetSingleton(_configStore.Object); - hc.SetSingleton(new HttpClientHandlerFactory()); - - var p1 = new ProcessInvokerWrapper(); // hashfiles - p1.Initialize(hc); - var p2 = new ProcessInvokerWrapper(); // hashfiles - p2.Initialize(hc); - var p3 = new ProcessInvokerWrapper(); // un-tar - p3.Initialize(hc); - var p4 = new ProcessInvokerWrapper(); // node -v - p4.Initialize(hc); - var p5 = new ProcessInvokerWrapper(); // node -v - p5.Initialize(hc); - var p6 = new ProcessInvokerWrapper(); // runner -v - p6.Initialize(hc); - hc.EnqueueInstance(p1); - hc.EnqueueInstance(p2); - hc.EnqueueInstance(p3); - hc.EnqueueInstance(p4); - hc.EnqueueInstance(p5); - hc.EnqueueInstance(p6); - updater.Initialize(hc); - - var trim = _trimmedPackages.Where(x => x.TrimmedContents.ContainsKey("dotnetRuntime") && x.TrimmedContents.ContainsKey("externals")).ToList(); - _runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny())) - .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim })); - - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => - { - hc.GetTrace().Info(t); - }) - .Returns(Task.FromResult(new TaskAgent())); - - try - { - var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken); - Assert.True(result); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"))); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"))); - } - finally - { - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None); - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None); - } - - var traceFile = Path.GetTempFileName(); - File.Copy(hc.TraceFileName, traceFile, true); - - var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}"); - var externalsHash = await File.ReadAllTextAsync(externalsHashFile); - - var runtimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}"); - var runtimeHash = await File.ReadAllTextAsync(runtimeHashFile); - - if (externalsHash == trim[0].TrimmedContents["externals"] && - runtimeHash == 
trim[0].TrimmedContents["dotnetRuntime"]) - { - Assert.Contains("Use trimmed (runtime+externals) package", File.ReadAllText(traceFile)); - } - else - { - Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile)); - } - } - } - finally - { - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void TestSelfUpdateAsync_NotUseExternalsRuntimeTrimmedPackageOnHashMismatch() - { - try - { - await FetchLatestRunner(); - Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); - using (var hc = new TestHostContext(this)) - { - hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); - - //Arrange - var updater = new Runner.Listener.SelfUpdater(); - hc.SetSingleton(_term.Object); - hc.SetSingleton(_runnerServer.Object); - hc.SetSingleton(_configStore.Object); - hc.SetSingleton(new HttpClientHandlerFactory()); - - var p1 = new ProcessInvokerWrapper(); // hashfiles - p1.Initialize(hc); - var p2 = new ProcessInvokerWrapper(); // hashfiles - p2.Initialize(hc); - var p3 = new ProcessInvokerWrapper(); // un-tar - p3.Initialize(hc); - var p4 = new ProcessInvokerWrapper(); // node -v - p4.Initialize(hc); - var p5 = new ProcessInvokerWrapper(); // node -v - p5.Initialize(hc); - var p6 = new ProcessInvokerWrapper(); // runner -v - p6.Initialize(hc); - hc.EnqueueInstance(p1); - hc.EnqueueInstance(p2); - hc.EnqueueInstance(p3); - hc.EnqueueInstance(p4); - hc.EnqueueInstance(p5); - hc.EnqueueInstance(p6); - updater.Initialize(hc); - - var trim = _trimmedPackages.ToList(); - foreach (var package in trim) - { - foreach (var hash in package.TrimmedContents.Keys) - { - package.TrimmedContents[hash] = "mismatch"; - } - } - - _runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny())) - .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim })); - - - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => - { - hc.GetTrace().Info(t); - }) - .Returns(Task.FromResult(new TaskAgent())); - - try - { - var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken); - Assert.True(result); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"))); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"))); - } - finally - { - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None); - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None); - } - - var traceFile = Path.GetTempFileName(); - File.Copy(hc.TraceFileName, traceFile, true); - Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile)); - } - } - finally - { - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] 
- public async void TestSelfUpdateAsync_FallbackToFullPackage() - { - try - { - await FetchLatestRunner(); - Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); - using (var hc = new TestHostContext(this)) - { - hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); - - //Arrange - var updater = new Runner.Listener.SelfUpdater(); - hc.SetSingleton(_term.Object); - hc.SetSingleton(_runnerServer.Object); - hc.SetSingleton(_configStore.Object); - hc.SetSingleton(new HttpClientHandlerFactory()); - - var p1 = new ProcessInvokerWrapper(); // hashfiles - p1.Initialize(hc); - var p2 = new ProcessInvokerWrapper(); // hashfiles - p2.Initialize(hc); - var p3 = new ProcessInvokerWrapper(); // un-tar trim - p3.Initialize(hc); - var p4 = new ProcessInvokerWrapper(); // un-tar full - p4.Initialize(hc); - hc.EnqueueInstance(p1); - hc.EnqueueInstance(p2); - hc.EnqueueInstance(p3); - hc.EnqueueInstance(p4); - updater.Initialize(hc); - - var trim = _trimmedPackages.ToList(); - foreach (var package in trim) - { - package.HashValue = "mismatch"; - } - - _runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny())) - .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim })); - - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => - { - hc.GetTrace().Info(t); - }) - .Returns(Task.FromResult(new TaskAgent())); - - try - { - var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken); - Assert.True(result); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"))); - Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"))); - } - finally - { - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None); - IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None); - } - - var traceFile = Path.GetTempFileName(); - File.Copy(hc.TraceFileName, traceFile, true); - if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package")) - { - Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile)); - } - else - { - hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated"); - } - } - } - finally - { - Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null); - } - } } #endif } diff --git a/src/Test/L0/Listener/SelfUpdaterV2L0.cs b/src/Test/L0/Listener/SelfUpdaterV2L0.cs index 51004e1fb0c..5115a6bbb7f 100644 --- a/src/Test/L0/Listener/SelfUpdaterV2L0.cs +++ b/src/Test/L0/Listener/SelfUpdaterV2L0.cs @@ -23,7 +23,6 @@ public sealed class SelfUpdaterV2L0 private Mock _configStore; private Mock _jobDispatcher; private AgentRefreshMessage _refreshMessage = new(1, "2.999.0"); - private List _trimmedPackages = new(); #if !OS_WINDOWS private string _packageUrl = null; @@ -81,12 +80,10 @@ 
public async void TestSelfUpdateAsync() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdaterV2(); @@ -143,12 +140,10 @@ public async void TestSelfUpdateAsync_DownloadRetry() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdaterV2(); @@ -194,12 +189,10 @@ public async void TestSelfUpdateAsync_ValidateHash() { await FetchLatestRunner(); Assert.NotNull(_packageUrl); - Assert.NotNull(_trimmedPackages); Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin"))); using (var hc = new TestHostContext(this)) { hc.GetTrace().Info(_packageUrl); - hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages)); //Arrange var updater = new Runner.Listener.SelfUpdaterV2(); diff --git a/src/Test/L0/PackagesTrimL0.cs b/src/Test/L0/PackagesTrimL0.cs deleted file mode 100644 index 5a3a98b7aaa..00000000000 --- a/src/Test/L0/PackagesTrimL0.cs +++ /dev/null @@ -1,303 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Threading; -using System.Threading.Channels; -using System.Threading.Tasks; -using GitHub.Runner.Common.Util; -using GitHub.Runner.Sdk; -using Xunit; - -namespace GitHub.Runner.Common.Tests -{ -#if DISABLED - public sealed class PackagesTrimL0 - { - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public async Task RunnerLayoutParts_NewFilesCrossAll() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets"); - var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets"); - string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin"); - var newFiles = new List(); - if (Directory.Exists(layoutBin)) - { - var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile); - var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile); - foreach (var file in Directory.GetFiles(layoutBin, "*", SearchOption.AllDirectories)) - { - if (!coreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)) && - !runtimeAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x))) - { - newFiles.Add(file); - } - } - - if (newFiles.Count > 0) - { - Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. 
These will break runner update using trimmed packages."); - } - } - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public async Task RunnerLayoutParts_OverlapFiles() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets"); - var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets"); - - var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile); - var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile); - - foreach (var line in coreAssets) - { - if (runtimeAssets.Contains(line, StringComparer.OrdinalIgnoreCase)) - { - Assert.True(false, $"'Misc/runnercoreassets' and 'Misc/runnerdotnetruntimeassets' should not overlap."); - } - } - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public async Task RunnerLayoutParts_NewRunnerCoreAssets() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets"); - var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile); - - string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin"); - var newFiles = new List(); - if (Directory.Exists(layoutBin)) - { - var binDirs = Directory.GetDirectories(TestUtil.GetSrcPath(), "net6.0", SearchOption.AllDirectories); - foreach (var binDir in binDirs) - { - if (binDir.Contains("Test") || binDir.Contains("obj")) - { - continue; - } - - Directory.GetFiles(binDir, "*", SearchOption.TopDirectoryOnly).ToList().ForEach(x => - { - if (!x.Contains("runtimeconfig.dev.json")) - { - if (!coreAssets.Any(y => x.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(y))) - { - newFiles.Add(x); - } - } - }); - } - - if (newFiles.Count > 0) - { - Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnercoreassets`."); - } - } - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public async Task RunnerLayoutParts_NewDotnetRuntimeAssets() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets"); - var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile); - - string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runnerdotnetruntimeassets"); - var newFiles = new List(); - if (File.Exists(layoutTrimsRuntimeAssets)) - { - var runtimeAssetsCurrent = await File.ReadAllLinesAsync(layoutTrimsRuntimeAssets); - foreach (var runtimeFile in runtimeAssetsCurrent) - { - if (runtimeAssets.Any(x => runtimeFile.EndsWith(x, StringComparison.OrdinalIgnoreCase))) - { - continue; - } - else - { - newFiles.Add(runtimeFile); - } - } - - if (newFiles.Count > 0) - { - Assert.True(false, $"Found new dotnet runtime files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. 
You might need to update `Misc/runnerdotnetruntimeassets`."); - } - } - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public async Task RunnerLayoutParts_CheckDotnetRuntimeHash() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}"); - trace.Info($"Current hash: {File.ReadAllText(dotnetRuntimeHashFile)}"); - string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runtime"); - - string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin"); - -#if OS_WINDOWS - string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node"); -#else - string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node"); -#endif - string hashFilesScript = Path.Combine(binDir, "hashFiles"); - var hashResult = string.Empty; - - var p1 = new ProcessInvokerWrapper(); - p1.Initialize(hc); - - p1.ErrorDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__")) - { - hashResult = data.Data.Substring(10, data.Data.Length - 20); - trace.Info($"Hash result: '{hashResult}'"); - } - else - { - trace.Info(data.Data); - } - }; - - p1.OutputDataReceived += (_, data) => - { - trace.Info(data.Data); - }; - - var env = new Dictionary - { - ["patterns"] = "**" - }; - - int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsRuntimeAssets, - fileName: node, - arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"", - environment: env, - requireExitCodeZero: true, - outputEncoding: null, - killProcessOnCancel: true, - cancellationToken: CancellationToken.None); - - Assert.True(string.Equals(hashResult, File.ReadAllText(dotnetRuntimeHashFile).Trim()), $"Hash mismatch for dotnet runtime. 
You might need to update `Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently."); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public async Task RunnerLayoutParts_CheckExternalsHash() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}"); - trace.Info($"Current hash: {File.ReadAllText(externalsHashFile)}"); - - string layoutTrimsExternalsAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/externals"); - - string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin"); - -#if OS_WINDOWS - string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node"); -#else - string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node"); -#endif - string hashFilesScript = Path.Combine(binDir, "hashFiles"); - var hashResult = string.Empty; - - var p1 = new ProcessInvokerWrapper(); - p1.Initialize(hc); - - p1.ErrorDataReceived += (_, data) => - { - if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__")) - { - hashResult = data.Data.Substring(10, data.Data.Length - 20); - trace.Info($"Hash result: '{hashResult}'"); - } - else - { - trace.Info(data.Data); - } - }; - - p1.OutputDataReceived += (_, data) => - { - trace.Info(data.Data); - }; - - var env = new Dictionary - { - ["patterns"] = "**" - }; - - int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsExternalsAssets, - fileName: node, - arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"", - environment: env, - requireExitCodeZero: true, - outputEncoding: null, - killProcessOnCancel: true, - cancellationToken: CancellationToken.None); - - Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. 
You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently."); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public Task RunnerLayoutParts_ContentHashFilesNoNewline() - { - using (TestHostContext hc = new(this)) - { - Tracing trace = hc.GetTrace(); - - var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}"); - var dotnetRuntimeHash = File.ReadAllText(dotnetRuntimeHashFile); - trace.Info($"Current hash: {dotnetRuntimeHash}"); - - var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}"); - var externalsHash = File.ReadAllText(externalsHashFile); - trace.Info($"Current hash: {externalsHash}"); - - Assert.False(externalsHash.Any(x => char.IsWhiteSpace(x)), $"Found whitespace in externals hash file."); - Assert.False(dotnetRuntimeHash.Any(x => char.IsWhiteSpace(x)), $"Found whitespace in dotnet runtime hash file."); - - return Task.CompletedTask; - } - } - } -#endif -} diff --git a/src/Test/L0/Worker/ActionCommandManagerL0.cs b/src/Test/L0/Worker/ActionCommandManagerL0.cs index 693c6f02543..3a1f8f70fe9 100644 --- a/src/Test/L0/Worker/ActionCommandManagerL0.cs +++ b/src/Test/L0/Worker/ActionCommandManagerL0.cs @@ -232,7 +232,7 @@ public void EchoProcessCommandDebugOn() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, diff --git a/src/Test/L0/Worker/ActionManifestManagerL0.cs b/src/Test/L0/Worker/ActionManifestManagerL0.cs index 385ae94633f..91f604c0645 100644 --- a/src/Test/L0/Worker/ActionManifestManagerL0.cs +++ b/src/Test/L0/Worker/ActionManifestManagerL0.cs @@ -757,7 +757,7 @@ public void Load_CompositeActionNoUsing() //Assert var err = Assert.Throws(() => actionManifest.Load(_ec.Object, action_path)); - Assert.Contains($"Fail to load {action_path}", err.Message); + Assert.Contains($"Failed to load {action_path}", err.Message); _ec.Verify(x => x.AddIssue(It.Is(s => s.Message.Contains("Missing 'using' value. 
'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'.")), It.IsAny()), Times.Once); } finally diff --git a/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs b/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs index 19f956fa8f2..185f44b3865 100644 --- a/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs +++ b/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs @@ -193,7 +193,7 @@ private TestHostContext Setup([CallerMemberName] string name = "") TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "Summary Job"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, diff --git a/src/Test/L0/Worker/ExecutionContextL0.cs b/src/Test/L0/Worker/ExecutionContextL0.cs index 91068d300f2..08abcd09585 100644 --- a/src/Test/L0/Worker/ExecutionContextL0.cs +++ b/src/Test/L0/Worker/ExecutionContextL0.cs @@ -29,7 +29,7 @@ public void AddIssue_CountWarningsErrors() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -106,7 +106,7 @@ public void ApplyContinueOnError_CheckResultAndOutcome() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -162,7 +162,7 @@ public void AddIssue_TrimMessageSize() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, 
null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -216,7 +216,7 @@ public void AddIssue_OverrideLogMessage() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -271,7 +271,7 @@ public void AddIssue_AddStepAndLineNumberInformation() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -322,7 +322,7 @@ public void Debug_Multilines() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -373,7 +373,7 @@ public void RegisterPostJobAction_ShareState() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new 
Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -471,7 +471,7 @@ public void RegisterPostJobAction_NotRegisterPostTwice() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -555,7 +555,7 @@ public void ActionResult_Lowercase() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -610,7 +610,7 @@ public void PublishStepTelemetry_RegularStep_NoOpt() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -653,7 +653,7 @@ public void PublishStepTelemetry_RegularStep() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, 
new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -717,7 +717,7 @@ public void PublishStepTelemetry_EmbeddedStep() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -781,7 +781,7 @@ public void PublishStepResult_EmbeddedStep() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -969,7 +969,7 @@ public void ActionVariables_AddedToVarsContext() TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -1014,7 +1014,7 @@ public void ActionVariables_DebugUsingVars() TimelineReference timeline = new TimelineReference(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, 
+            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null);
             jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
             {
                 Alias = Pipelines.PipelineConstants.SelfAlias,
@@ -1057,7 +1057,7 @@ public void ActionVariables_SecretsPrecedenceForDebugUsingVars()
             TimelineReference timeline = new TimelineReference();
             Guid jobId = Guid.NewGuid();
             string jobName = "some job name";
-            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null);
+            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null);
             jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
             {
                 Alias = Pipelines.PipelineConstants.SelfAlias,
diff --git a/src/Test/L0/Worker/JobExtensionL0.cs b/src/Test/L0/Worker/JobExtensionL0.cs
index 3f5e074a852..d66ded66372 100644
--- a/src/Test/L0/Worker/JobExtensionL0.cs
+++ b/src/Test/L0/Worker/JobExtensionL0.cs
@@ -4,6 +4,8 @@
 using System.Runtime.CompilerServices;
 using System.Threading;
 using System.Threading.Tasks;
+using GitHub.DistributedTask.ObjectTemplating.Tokens;
+using GitHub.DistributedTask.Pipelines.ObjectTemplating;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Worker;
 using Moq;
@@ -25,6 +27,9 @@ public sealed class JobExtensionL0
         private Mock _containerProvider;
         private Mock _diagnosticLogManager;
         private Mock _jobHookProvider;
+        private Mock _snapshotOperationProvider;
+
+        private Pipelines.Snapshot _requestedSnapshot;
         private CancellationTokenSource _tokenSource;

         private TestHostContext CreateTestContext([CallerMemberName] String testName = "
@@ -41,7 +46,16 @@ private TestHostContext CreateTestContext([CallerMemberName] String testName = "
             _directoryManager.Setup(x => x.PrepareDirectory(It.IsAny(), It.IsAny()))
                 .Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
             _jobHookProvider = new Mock();
+            _snapshotOperationProvider = new Mock();
+            _requestedSnapshot = null;
+            _snapshotOperationProvider
+                .Setup(p => p.CreateSnapshotRequestAsync(It.IsAny(), It.IsAny()))
+                .Returns((IExecutionContext _, object data) =>
+                {
+                    _requestedSnapshot = data as Pipelines.Snapshot;
+                    return Task.CompletedTask;
+                });

             IActionRunner step1 = new ActionRunner();
             IActionRunner step2 = new ActionRunner();
             IActionRunner step3 = new ActionRunner();
@@ -100,7 +114,7 @@ private TestHostContext CreateTestContext([CallerMemberName] String testName = "
             };

             Guid jobId = Guid.NewGuid();
-            _message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), steps, null, null, null, null);
+            _message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), steps, null, null, null, null, null);
             GitHubContext github = new();
             github["repository"] = new Pipelines.ContextData.StringContextData("actions/runner");
             github["secret_source"] = new Pipelines.ContextData.StringContextData("Actions");
@@ -125,6 +139,7 @@ private TestHostContext CreateTestContext([CallerMemberName] String testName = "
             hc.SetSingleton(_directoryManager.Object);
             hc.SetSingleton(_diagnosticLogManager.Object);
             hc.SetSingleton(_jobHookProvider.Object);
+            hc.SetSingleton(_snapshotOperationProvider.Object);
             hc.EnqueueInstance(_logger.Object); // JobExecutionContext
             hc.EnqueueInstance(_logger.Object); // job start hook
             hc.EnqueueInstance(_logger.Object); // Initial Job
@@ -443,5 +458,80 @@ public async Task EnsureNoPreAndPostHookSteps()
                 Assert.Equal(0, _jobEc.PostJobSteps.Count);
             }
         }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public async Task EnsureNoSnapshotPostJobStep()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                var jobExtension = new JobExtension();
+                jobExtension.Initialize(hc);
+
+                _actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny(), It.IsAny>(), It.IsAny()))
+                    .Returns(Task.FromResult(new PrepareResult(new List(), new Dictionary())));
+
+                _message.Snapshot = null;
+                await jobExtension.InitializeJob(_jobEc, _message);
+
+                var postJobSteps = _jobEc.PostJobSteps;
+                Assert.Equal(0, postJobSteps.Count);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public Task EnsureSnapshotPostJobStepForStringToken()
+        {
+            var snapshot = new Pipelines.Snapshot("TestImageNameFromStringToken");
+            var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
+            return EnsureSnapshotPostJobStepForToken(imageNameValueStringToken, snapshot);
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public Task EnsureSnapshotPostJobStepForMappingToken()
+        {
+            var snapshot = new Pipelines.Snapshot("TestImageNameFromMappingToken");
+            var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
+            var mappingToken = new MappingToken(null, null, null)
+            {
+                { new StringToken(null,null,null, PipelineTemplateConstants.ImageName), imageNameValueStringToken }
+            };
+
+            return EnsureSnapshotPostJobStepForToken(mappingToken, snapshot);
+        }
+
+        private async Task EnsureSnapshotPostJobStepForToken(TemplateToken snapshotToken, Pipelines.Snapshot expectedSnapshot)
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                var jobExtension = new JobExtension();
+                jobExtension.Initialize(hc);
+
+                _actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny(), It.IsAny>(), It.IsAny()))
+                    .Returns(Task.FromResult(new PrepareResult(new List(), new Dictionary())));
+
+                _message.Snapshot = snapshotToken;
+
+                await jobExtension.InitializeJob(_jobEc, _message);
+
+                var postJobSteps = _jobEc.PostJobSteps;
+
+                Assert.Equal(1, postJobSteps.Count);
+                var snapshotStep = postJobSteps.First();
+                Assert.Equal("Create custom image", snapshotStep.DisplayName);
+                Assert.Equal($"{PipelineTemplateConstants.Success}()", snapshotStep.Condition);
+
+                // Run the mock snapshot step, so we can verify it was executed with the expected snapshot object.
+                await snapshotStep.RunAsync();
+
+                Assert.NotNull(_requestedSnapshot);
+                Assert.Equal(expectedSnapshot.ImageName, _requestedSnapshot.ImageName);
+            }
+        }
     }
 }
diff --git a/src/Test/L0/Worker/JobRunnerL0.cs b/src/Test/L0/Worker/JobRunnerL0.cs
index 5a04c41e9ac..d0a3c012281 100644
--- a/src/Test/L0/Worker/JobRunnerL0.cs
+++ b/src/Test/L0/Worker/JobRunnerL0.cs
@@ -103,6 +103,7 @@ protected Pipelines.AgentJobRequestMessage GetMessage(String messageType = JobRe
                 testName,
                 testName,
                 null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, new ActionsEnvironmentReference("staging"),
+                null,
                 messageType: messageType);
             message.Variables[Constants.Variables.System.Culture] = "en-US";
             message.Resources.Endpoints.Add(new ServiceEndpoint()
diff --git a/src/Test/L0/Worker/SnapshotOperationProviderL0.cs b/src/Test/L0/Worker/SnapshotOperationProviderL0.cs
new file mode 100644
index 00000000000..4f747ae8e25
--- /dev/null
+++ b/src/Test/L0/Worker/SnapshotOperationProviderL0.cs
@@ -0,0 +1,78 @@
+#nullable enable
+using System;
+using System.IO;
+using System.Runtime.CompilerServices;
+using GitHub.DistributedTask.Pipelines;
+using GitHub.Runner.Sdk;
+using GitHub.Runner.Worker;
+using Moq;
+using Xunit;
+
+namespace GitHub.Runner.Common.Tests.Worker;
+
+public class SnapshotOperationProviderL0
+{
+    private Mock? _ec;
+    private SnapshotOperationProvider? _snapshotOperationProvider;
+    private string? _snapshotRequestFilePath;
+    private string? _snapshotRequestDirectoryPath;
+
+    [Theory]
+    [InlineData(true)]
+    [InlineData(false)]
+    [Trait("Level", "L0")]
+    [Trait("Category", "Worker")]
+    public async void CreateSnapshotRequestAsync(bool shouldSnapshotDirectoryAlreadyExist)
+    {
+        using (TestHostContext testHostContext = CreateTestHostContext())
+        {
+            //Arrange
+            Setup(testHostContext, shouldSnapshotDirectoryAlreadyExist);
+            var expectedSnapshot = new Snapshot(Guid.NewGuid().ToString());
+
+            //Act
+            await _snapshotOperationProvider!.CreateSnapshotRequestAsync(_ec!.Object, expectedSnapshot);
+
+            //Assert
+            var actualSnapshot = IOUtil.LoadObject(_snapshotRequestFilePath);
+            Assert.NotNull(actualSnapshot);
+            Assert.Equal(expectedSnapshot.ImageName, actualSnapshot!.ImageName);
+            _ec.Verify(ec => ec.Write(null, $"Request written to: {_snapshotRequestFilePath}"), Times.Once);
+            _ec.Verify(ec => ec.Write(null, "This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job."), Times.Once);
+            _ec.Verify(ec => ec.Write(null, "If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner."), Times.Once);
+            _ec.VerifyNoOtherCalls();
+        }
+    }
+
+    private void Setup(IHostContext hostContext, bool shouldSnapshotDirectoryAlreadyExist)
+    {
+        _ec = new Mock();
+        _snapshotOperationProvider = new SnapshotOperationProvider();
+        _snapshotOperationProvider.Initialize(hostContext);
+        _snapshotRequestFilePath = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json");
+        _snapshotRequestDirectoryPath = Path.GetDirectoryName(_snapshotRequestFilePath);
+
+        if (_snapshotRequestDirectoryPath != null)
+        {
+            // Clean up any existing snapshot directory and its contents before starting the test.
+            if (Directory.Exists(_snapshotRequestDirectoryPath))
+            {
+                Directory.Delete(_snapshotRequestDirectoryPath, true);
+            }
+
+            if (shouldSnapshotDirectoryAlreadyExist)
+            {
+                // Create a fresh snapshot directory if it's required for the test.
+                Directory.CreateDirectory(_snapshotRequestDirectoryPath);
+            }
+        }
+    }
+
+    private TestHostContext CreateTestHostContext([CallerMemberName] string testName = "")
+    {
+        var testHostContext = new TestHostContext(this, testName);
+        _ec = new Mock();
+        _ec.Object.Initialize(testHostContext);
+        return testHostContext;
+    }
+}
diff --git a/src/Test/L0/Worker/WorkerL0.cs b/src/Test/L0/Worker/WorkerL0.cs
index fe1f9c02c19..defcc981404 100644
--- a/src/Test/L0/Worker/WorkerL0.cs
+++ b/src/Test/L0/Worker/WorkerL0.cs
@@ -67,7 +67,7 @@ private Pipelines.AgentJobRequestMessage CreateJobRequestMessage(string jobName)
                     new Pipelines.ContextData.DictionaryContextData()
                 },
             };
-            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List(), resources, context, null, actions, null, null, null, null);
+            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List(), resources, context, null, actions, null, null, null, null, null);
             return jobRequest;
         }

diff --git a/src/dev.sh b/src/dev.sh
index 6a339753d03..485ca520ee3 100644
--- a/src/dev.sh
+++ b/src/dev.sh
@@ -14,15 +14,10 @@ DEV_TARGET_RUNTIME=$3
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 LAYOUT_DIR="$SCRIPT_DIR/../_layout"
-LAYOUT_TRIMS_DIR="$SCRIPT_DIR/../_layout_trims"
-LAYOUT_TRIM_EXTERNALS_DIR="$LAYOUT_TRIMS_DIR/trim_externals"
-LAYOUT_TRIM_RUNTIME_DIR="$LAYOUT_TRIMS_DIR/trim_runtime"
-LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR="$LAYOUT_TRIMS_DIR/trim_runtime_externals"
 DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
 PACKAGE_DIR="$SCRIPT_DIR/../_package"
-PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
 DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
-DOTNETSDK_VERSION="6.0.415"
+DOTNETSDK_VERSION="6.0.419"
 DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"

 RUNNER_VERSION=$(cat runnerversion)

@@ -148,48 +143,6 @@ function layout ()

     heading "Setup externals folder for $RUNTIME_ID runner's layout"
     bash ./Misc/externals.sh $RUNTIME_ID || checkRC externals.sh
-
-    heading "Create layout (Trimmed) ..."
-
-    rm -Rf "$LAYOUT_TRIMS_DIR"
-    mkdir -p "$LAYOUT_TRIMS_DIR"
-    mkdir -p "$LAYOUT_TRIMS_DIR/runtime"
-    cp -r "$LAYOUT_DIR/bin/." "$LAYOUT_TRIMS_DIR/runtime"
-    mkdir -p "$LAYOUT_TRIMS_DIR/externals"
-    cp -r "$LAYOUT_DIR/externals/." "$LAYOUT_TRIMS_DIR/externals"
-
-    pushd "$LAYOUT_TRIMS_DIR/runtime" > /dev/null
-    if [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
-        sed -i 's/\n$/\r\n/' "$SCRIPT_DIR/Misc/runnercoreassets"
-    fi
-
-    cat "$SCRIPT_DIR/Misc/runnercoreassets" | xargs rm -f
-    find . -empty -type d -delete
-    find . -type f > "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets"
-    popd > /dev/null
-
-    heading "Create layout with externals trimmed ..."
-    mkdir -p "$LAYOUT_TRIM_EXTERNALS_DIR"
-    cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_EXTERNALS_DIR/"
-    rm -Rf "$LAYOUT_TRIM_EXTERNALS_DIR/externals"
-    echo "Created... $LAYOUT_TRIM_EXTERNALS_DIR"
-
-    heading "Create layout with dotnet runtime trimmed ..."
-    mkdir -p "$LAYOUT_TRIM_RUNTIME_DIR"
-    cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_RUNTIME_DIR/"
"$LAYOUT_TRIM_RUNTIME_DIR/" - pushd "$LAYOUT_TRIM_RUNTIME_DIR/bin" > /dev/null - cat "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets" | xargs rm -f - echo "Created... $LAYOUT_TRIM_RUNTIME_DIR" - popd > /dev/null - - heading "Create layout with externals and dotnet runtime trimmed ..." - mkdir -p "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR" - cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/" - rm -Rf "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/externals" - pushd "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/bin" > /dev/null - cat "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets" | xargs rm -f - echo "Created... $LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR" - popd > /dev/null } function runtest () @@ -226,9 +179,7 @@ function package () find "${LAYOUT_DIR}/bin" -type f -name '*.pdb' -delete mkdir -p "$PACKAGE_DIR" - mkdir -p "$PACKAGE_TRIMS_DIR" rm -Rf "${PACKAGE_DIR:?}"/* - rm -Rf "${PACKAGE_TRIMS_DIR:?}"/* pushd "$PACKAGE_DIR" > /dev/null @@ -246,66 +197,6 @@ function package () fi popd > /dev/null - - runner_trim_externals_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noexternals" - heading "Packaging ${runner_trim_externals_pkg_name} (Trimmed)" - - PACKAGE_TRIM_EXTERNALS_DIR="$PACKAGE_TRIMS_DIR/trim_externals" - mkdir -p "$PACKAGE_TRIM_EXTERNALS_DIR" - pushd "$PACKAGE_TRIM_EXTERNALS_DIR" > /dev/null - if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then - tar_name="${runner_trim_externals_pkg_name}.tar.gz" - echo "Creating $tar_name in ${LAYOUT_TRIM_EXTERNALS_DIR}" - tar -czf "${tar_name}" -C "${LAYOUT_TRIM_EXTERNALS_DIR}" . - elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then - zip_name="${runner_trim_externals_pkg_name}.zip" - echo "Convert ${LAYOUT_TRIM_EXTERNALS_DIR} to Windows style path" - window_path=${LAYOUT_TRIM_EXTERNALS_DIR:1} - window_path=${window_path:0:1}:${window_path:1} - echo "Creating $zip_name in ${window_path}" - $POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")" - fi - popd > /dev/null - - runner_trim_runtime_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noruntime" - heading "Packaging ${runner_trim_runtime_pkg_name} (Trimmed)" - - PACKAGE_TRIM_RUNTIME_DIR="$PACKAGE_TRIMS_DIR/trim_runtime" - mkdir -p "$PACKAGE_TRIM_RUNTIME_DIR" - pushd "$PACKAGE_TRIM_RUNTIME_DIR" > /dev/null - if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then - tar_name="${runner_trim_runtime_pkg_name}.tar.gz" - echo "Creating $tar_name in ${LAYOUT_TRIM_RUNTIME_DIR}" - tar -czf "${tar_name}" -C "${LAYOUT_TRIM_RUNTIME_DIR}" . 
- elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then - zip_name="${runner_trim_runtime_pkg_name}.zip" - echo "Convert ${LAYOUT_TRIM_RUNTIME_DIR} to Windows style path" - window_path=${LAYOUT_TRIM_RUNTIME_DIR:1} - window_path=${window_path:0:1}:${window_path:1} - echo "Creating $zip_name in ${window_path}" - $POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")" - fi - popd > /dev/null - - runner_trim_runtime_externals_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noruntime-noexternals" - heading "Packaging ${runner_trim_runtime_externals_pkg_name} (Trimmed)" - - PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR="$PACKAGE_TRIMS_DIR/trim_runtime_externals" - mkdir -p "$PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR" - pushd "$PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR" > /dev/null - if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then - tar_name="${runner_trim_runtime_externals_pkg_name}.tar.gz" - echo "Creating $tar_name in ${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR}" - tar -czf "${tar_name}" -C "${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR}" . - elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then - zip_name="${runner_trim_runtime_externals_pkg_name}.zip" - echo "Convert ${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR} to Windows style path" - window_path=${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR:1} - window_path=${window_path:0:1}:${window_path:1} - echo "Creating $zip_name in ${window_path}" - $POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")" - fi - popd > /dev/null } if [[ (! -d "${DOTNETSDK_INSTALLDIR}") || (! -e "${DOTNETSDK_INSTALLDIR}/.${DOTNETSDK_VERSION}") || (! -e "${DOTNETSDK_INSTALLDIR}/dotnet") ]]; then diff --git a/src/global.json b/src/global.json index 1c3ec6209d5..bf923c69ce8 100644 --- a/src/global.json +++ b/src/global.json @@ -1,5 +1,5 @@ { "sdk": { - "version": "6.0.415" + "version": "6.0.419" } } diff --git a/src/runnerversion b/src/runnerversion index 28c633a1ae1..bba6eff1556 100644 --- a/src/runnerversion +++ b/src/runnerversion @@ -1 +1 @@ -2.311.0 +2.314.0