diff --git a/.circleci/collect_muzzle_deps.sh b/.circleci/collect_muzzle_deps.sh new file mode 100755 index 00000000000..6f80484f97c --- /dev/null +++ b/.circleci/collect_muzzle_deps.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +#Enable '**' support +shopt -s globstar + +REPORTS_DIR=./reports +mkdir -p $REPORTS_DIR >/dev/null 2>&1 + +echo "saving muzzle dependency reports" + +find workspace/**/build/muzzle-deps-results -type f -name 'dd-java-agent_instrumentation.csv' -exec cp {} $REPORTS_DIR/ \; diff --git a/.circleci/config.continue.yml.j2 b/.circleci/config.continue.yml.j2 new file mode 100644 index 00000000000..026026ea390 --- /dev/null +++ b/.circleci/config.continue.yml.j2 @@ -0,0 +1,1341 @@ +version: 2.1 + +defaults: &defaults + working_directory: ~/dd-trace-java + docker: + - image: &default_container << pipeline.parameters.docker_image >>:<< pipeline.parameters.docker_image_tag >> + +test_matrix: &test_matrix + parameters: + testJvm: +{% for jdk in nocov_jdks %} + - "{{ jdk }}" +{% endfor %} + +profiling_test_matrix: &profiling_test_matrix + parameters: + testJvm: +{% for jdk in all_jdks %} + - "{{ jdk }}" +{% endfor %} + +debugger_test_matrix: &debugger_test_matrix + parameters: + testJvm: +{% for jdk in all_debugger_jdks %} + - "{{ jdk }}" +{% endfor %} + +system_test_matrix: &system_test_matrix + parameters: + weblog-variant: [ 'spring-boot', 'spring-boot-jetty', 'spring-boot-openliberty', 'spring-boot-3-native', 'jersey-grizzly2', 'resteasy-netty3','ratpack', 'vertx3' ] + +agent_integration_tests_modules: &agent_integration_tests_modules "dd-trace-core|communication|internal-api|utils" +core_modules: &core_modules "dd-java-agent|dd-trace-core|communication|internal-api|telemetry|utils|dd-java-agent/agent-bootstrap|dd-java-agent/agent-installer|dd-java-agent/agent-tooling|dd-java-agent/agent-builder|dd-java-agent/appsec|dd-java-agent/agent-crashtracking" +instrumentation_modules: &instrumentation_modules "dd-java-agent/instrumentation|dd-java-agent/agent-tooling|dd-java-agent/agent-installer|dd-java-agent/agent-builder|dd-java-agent/agent-bootstrap|dd-java-agent/appsec|dd-java-agent/testing|dd-trace-core|dd-trace-api|internal-api" +debugger_modules: &debugger_modules "dd-java-agent/agent-debugger|dd-java-agent/agent-bootstrap|dd-java-agent/agent-builder|internal-api|communication|dd-trace-core" +profiling_modules: &profiling_modules "dd-java-agent/agent-profiling" + +default_system_tests_commit: &default_system_tests_commit 2487cea5160a398549743d2cfd927a863792e3bd + +parameters: + nightly: + type: boolean + default: false + weekly: + type: boolean + default: false + + gradle_flags: + # Using no-daemon is important for the caches to be in a consistent state + type: string + default: "--stacktrace --no-daemon" + + global_pattern: + # Pattern for files that should always trigger a test jobs + type: string + default: "^build.gradle$|^settings.gradle$|^gradle.properties$|^buildSrc/|^gradle/|.circleci" + + docker_image: + type: string + default: ghcr.io/datadog/dd-trace-java-docker-build + + docker_image_tag: + type: string + default: {{ docker_image_prefix }}base + +commands: + check_for_leftover_files: + steps: + - run: + name: Check for leftover files + command: | + LEFTOVER_FILES=$(find . 
-type f -regex '.*\.orig$') + if [[ "$LEFTOVER_FILES" != "" ]] + then + echo -e "Found leftover files in the commit:\n$LEFTOVER_FILES" + exit 1 + fi + + generate_cache_ids: + steps: + - run: + name: Generate cache ids + command: | + # Everything falls back to the main cache + BASE_CACHE_ID="main" + if [ "$CIRCLE_BRANCH" == "master" ]; + then + # If we're on a the main branch, then they are the same + echo "${BASE_CACHE_ID}" >| _circle_ci_cache_id + else + # If we're on a PR branch, then we use the name of the branch and the + # PR number as a stable identifier for the branch cache + echo "${CIRCLE_BRANCH}-${CIRCLE_PULL_REQUEST##*/}" >| _circle_ci_cache_id + fi + # Have new branches start from the main cache + echo "${BASE_CACHE_ID}" >| _circle_ci_cache_base_id + + setup_code: + steps: + - checkout + - run: + name: Checkout merge commit + command: | + CCI_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}" + + if [[ "$CIRCLE_BRANCH" != "master" && -n "${CCI_PR_NUMBER}" ]] + then + FETCH_REFS="${FETCH_REFS} +refs/pull/${CCI_PR_NUMBER}/merge:refs/pull/${CCI_PR_NUMBER}/merge +refs/pull/${CCI_PR_NUMBER}/head:refs/pull/${CCI_PR_NUMBER}/head" + git fetch -u origin ${FETCH_REFS} + + if git merge-base --is-ancestor $(git show-ref --hash refs/pull/${CCI_PR_NUMBER}/head) $(git show-ref --hash refs/pull/${CCI_PR_NUMBER}/merge); then + git checkout "pull/${CCI_PR_NUMBER}/merge" + else + echo "[WARN] There is a merge conflict between master and PR ${CCI_PR_NUMBER}, merge branch cannot be checked out." + git checkout "pull/${CCI_PR_NUMBER}/head" + fi + fi + + - check_for_leftover_files + + - generate_cache_ids + + setup_testcontainers: + description: >- + Sets up remote docker and automatic port forwarding needed for docker on docker + version of Testcontainers. + steps: + - setup_remote_docker: + version: 20.10.18 + # DLC shares Docker layers across jobs (at an extra cost). + # But its time to setup (~1min) exceeds the time required to prefetch all images we use. + docker_layer_caching: false + + - run: + name: Prepare testcontainers environment + command: .circleci/prepare_docker_env.sh + + - run: + name: Testcontainers tunnels + background: true + command: .circleci/start_docker_autoforward.sh + + - run: + name: Prefetch Docker images + background: true + command: .circleci/fetch_docker_images.sh + + early_return_for_forked_pull_requests: + description: >- + If this build is from a fork, stop executing the current job and return success. + This is useful to avoid steps that will fail due to missing credentials. 
+ steps: + - run: + name: Early return if this build is from a forked PR + command: | + if [[ "$CIRCLE_BRANCH" != "master" && -n "$CIRCLE_PR_NUMBER" ]]; then + echo "Nothing to do for forked PRs, so marking this step successful" + circleci step halt + fi + + skip_unless_matching_files_changed: + description: >- + If files matching the regular expression haven't changed in the commit, then skip the job + parameters: + pattern: + type: string + steps: + - run: + name: "Check if files relevant to job have changed" + command: | + CCI_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}" + + if [[ "$CIRCLE_BRANCH" != "master" && -n "$CCI_PR_NUMBER" ]]; then + BRANCH="$(git rev-parse --abbrev-ref HEAD)" + if [[ "$BRANCH" != "master" ]] && [[ "$BRANCH" != "release/*" ]]; then + # We know that we have checked out the PR merge branch, so the HEAD commit is a merge + # As a backup, if anything goes wrong with the diff, the build will fail + CHANGED_FILES=$(git show HEAD | grep -e "^Merge:" | cut -d ' ' -f 2- | sed 's/ /.../' | xargs git diff --name-only) + # Count the number of matches, and ignore if the grep doesn't match anything + MATCH_COUNT=$(echo "$CHANGED_FILES" | grep -c -E "<< pipeline.parameters.global_pattern >>|<< parameters.pattern >>") || true + if [[ "$MATCH_COUNT" -eq "0" ]]; then + circleci step halt + fi + fi + fi + + display_memory_usage: + steps: + - run: + name: Max Memory Used + # The file does not seem to exist when DLC is disabled + command: cat /sys/fs/cgroup/memory/memory.max_usage_in_bytes || true + when: always + + + # The caching setup of the build dependencies is somewhat involved because of how CircleCI works. + # 1) Caches are immutable, so you can not reuse a cache key (the save will simply be ignored) + # 2) Cache keys are prefix matched, and the most recently updated cache that matches will be picked + # + # There is a weekly job that runs on Monday mornings that builds a new cache from scratch. 
+ {% raw %} + restore_dependency_cache: + parameters: + cacheType: + type: string + steps: + - restore_cache: + keys: + # Dependent steps will find this cache + - dd-trace-java-dep<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} + # New branch commits will find this cache + - dd-trace-java-dep<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}- + # New branches fall back on main build caches + - dd-trace-java-dep<< parameters.cacheType >>-v4-master-{{ checksum "_circle_ci_cache_base_id" }}- + # Fallback to the previous cache during transition + - dd-trace-java-dep<< parameters.cacheType >>-v3-master-{{ checksum "_circle_ci_cache_base_id" }}- + + save_dependency_cache: + parameters: + cacheType: + type: string + steps: + - save_cache: + key: dd-trace-java-dep<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} + paths: + # Cached dependencies and wrappers for gradle + - ~/.gradle/caches + - ~/.gradle/wrapper + # Cached dependencies for maven + - ~/.m2 + # Cached launchers and compilers for sbt + - ~/.sbt + # Cached dependencies for sbt handled by ivy + - ~/.ivy2 + # Cached dependencies for sbt handled by coursier + - ~/.cache/coursier + + restore_build_cache: + parameters: + cacheType: + type: string + steps: + - restore_cache: + keys: + # Dependent steps will find this cache + - dd-trace-java-build<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} + + save_build_cache: + parameters: + cacheType: + type: string + steps: + - save_cache: + key: dd-trace-java-build<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} + paths: + # Gradle version specific cache for incremental builds. 
Needs to match version in + # gradle/wrapper/gradle-wrapper.properties + - ~/.gradle/caches/8.3 + # Workspace + - ~/dd-trace-java/.gradle + - ~/dd-trace-java/workspace +{% endraw %} + + setup_system_tests: + parameters: + systemTestsCommit: + type: string + default: *default_system_tests_commit + steps: + - generate_cache_ids + + - restore_build_cache: + cacheType: lib + + - run: + name: Install python 3.9 + command: | + sudo apt-get install python3.9-full python3.9-dev python3.9-venv + echo 'export PATH="$HOME/.local/bin:$PATH"' >>"$BASH_ENV" + + - run: + name: Clone system-tests + command: | + git init system-tests + cd system-tests + git remote add origin https://github.com/DataDog/system-tests.git + git fetch origin << parameters.systemTestsCommit >> + git reset --hard FETCH_HEAD + +jobs: + build: + <<: *defaults + resource_class: xlarge + + parameters: + gradleTarget: + type: string + cacheType: + type: string + collectLibs: + type: boolean + default: false + + steps: + - setup_code + + - restore_dependency_cache: + cacheType: << parameters.cacheType >> + + - run: + name: Build Project + command: >- + MAVEN_OPTS="-Xms64M -Xmx256M" + GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + ./gradlew clean + << parameters.gradleTarget >> + -PskipTests + << pipeline.parameters.gradle_flags >> + --max-workers=8 + --rerun-tasks + + - when: + condition: + equal: [ true, << parameters.collectLibs >> ] + steps: + - run: + name: Collect Libs + when: always + command: .circleci/collect_libs.sh + - store_artifacts: + path: ./libs + + - run: + name: Collect reports + when: on_fail + command: .circleci/collect_reports.sh --destination ./check_reports --move + + - run: + name: Delete reports + when: on_success + command: .circleci/collect_reports.sh --destination ./check_reports --delete + + - store_artifacts: + path: ./check_reports + + # Save a full dependency cache when building on master or a base project branch. + # We used to do this on the first build of each PR, but now it's skipped at the + # cost of downloading new dependencies a few more times. 
+ - when: + condition: + matches: + pattern: "^(master|project/.+)$" + value: << pipeline.git.branch >> + steps: + - save_dependency_cache: + cacheType: << parameters.cacheType >> + + # Save the small build cache + - save_build_cache: + cacheType: << parameters.cacheType >> + + - display_memory_usage + + spotless: + <<: *defaults + resource_class: medium+ + + steps: + - setup_code + + - run: + name: Run spotless + command: >- + JAVA_HOME=$JAVA_11_HOME + ./gradlew spotlessCheck + << pipeline.parameters.gradle_flags >> + --max-workers=8 + + check: + <<: *defaults + + parameters: + parallelism: + type: integer + default: 1 + gradleTarget: + type: string + cacheType: + type: string + + resource_class: medium+ + + parallelism: << parameters.parallelism >> + + steps: + - setup_code + - restore_dependency_cache: + cacheType: << parameters.cacheType >> + - restore_build_cache: + cacheType: << parameters.cacheType >> + + - run: + name: Check Project + command: >- + MAVEN_OPTS="-Xms64M -Xmx256M" + GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + ./gradlew + << parameters.gradleTarget >> + -PskipTests + -PrunBuildSrcTests + -PtaskPartitionCount=${CIRCLE_NODE_TOTAL} -PtaskPartition=${CIRCLE_NODE_INDEX} + << pipeline.parameters.gradle_flags >> + --max-workers=8 + + - run: + name: Collect reports + when: on_fail + command: .circleci/collect_reports.sh --destination ./check_reports --move + + - run: + name: Delete reports + when: on_success + command: .circleci/collect_reports.sh --destination ./check_reports --delete + + - store_artifacts: + path: ./check_reports + + - run: + name: Cancel workflow + when: on_fail + command: .circleci/cancel_workflow.sh + + build_clean_cache: + <<: *defaults + + parameters: + gradleTarget: + type: string + cacheType: + type: string + collectLibs: + type: boolean + default: false + + resource_class: xlarge + + steps: + - setup_code + + - run: + name: Build Project + command: >- + MAVEN_OPTS="-Xms64M -Xmx256M" + GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + ./gradlew clean + << parameters.gradleTarget >> + -PskipTests + << pipeline.parameters.gradle_flags >> + --max-workers=8 + --rerun-tasks + + - when: + condition: + not: + equal: [true, << parameters.collectLibs >>] + steps: + - run: + name: Collect Libs + when: always + command: .circleci/collect_libs.sh + - store_artifacts: + path: ./libs + + - save_dependency_cache: + cacheType: << parameters.cacheType >> + + - display_memory_usage + + tests: &tests + <<: *defaults + # since tests use test containers, they will use a Linux VM / Remote Docker executor, so there is no medium+ size + resource_class: large + + docker: + - image: << pipeline.parameters.docker_image >>:{{ docker_image_prefix }}<< parameters.testJvm >> + + parameters: + testJvm: + type: string + default: "" + maxDaemonHeapSize: + type: string + default: "2G" + gradleParameters: + type: string + default: "" + gradleTarget: + type: string + triggeredBy: + type: string + default: ".*" + stage: + type: string + default: "" + parallelism: + type: integer + default: 1 + maxWorkers: + type: integer + default: 2 + profile: + type: boolean + default: false + continueOnFailure: + type: boolean + default: false + cacheType: + type: string + + parallelism: << parameters.parallelism >> + + steps: + - setup_code + + - skip_unless_matching_files_changed: + pattern: << 
parameters.triggeredBy >> + + - restore_dependency_cache: + cacheType: << parameters.cacheType >> + - restore_build_cache: + cacheType: << parameters.cacheType >> + + - when: + condition: + or: + - equal: ["core", << parameters.stage >>] + - equal: ["instrumentation", << parameters.stage >>] + - equal: ["smoke", << parameters.stage >>] + steps: + - setup_testcontainers + + - run: + name: Run tests + command: >- + if [[ << parameters.profile >> ]] && [[ << parameters.testJvm >> != "ibm8" ]] && [[ << parameters.testJvm >> != "oracle8" ]]; + then + PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/<< parameters.stage >>-<< parameters.testJvm >>.jfr,dumponexit=true" + fi + + MAVEN_OPTS="-Xms64M -Xmx512M" + GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms<< parameters.maxDaemonHeapSize >> -Xmx<< parameters.maxDaemonHeapSize >> $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" + ./gradlew + << parameters.gradleTarget >> + << parameters.gradleParameters >> + -PtaskPartitionCount=${CIRCLE_NODE_TOTAL} -PtaskPartition=${CIRCLE_NODE_INDEX} + <<# parameters.testJvm >>-PtestJvm=<< parameters.testJvm >><> + << pipeline.parameters.gradle_flags >> + --max-workers=<< parameters.maxWorkers >> + --continue + <<# parameters.continueOnFailure >> || true <> + + - run: + name: Collect reports + when: on_fail + command: .circleci/collect_reports.sh + + - store_artifacts: + path: ./reports.tar + + - when: + condition: + equal: [true, << parameters.profile >>] + steps: + - run: + name: Collect profiles + when: always + command: .circleci/collect_profiles.sh + + - store_artifacts: + path: ./profiles.tar + + - run: + name: Collect test results + when: always + command: .circleci/collect_results.sh + + - store_test_results: + path: ./results + + - display_memory_usage + + - early_return_for_forked_pull_requests + + - run: + name: Upload test results to Datadog + when: always + command: .circleci/upload_ciapp.sh << parameters.stage >> << parameters.testJvm >> || true + + - run: + name: Get APM Test Agent Trace Check Results + when: always + command: | + set +e # Disable exiting from testagent response failure + SUMMARY_RESPONSE=$(curl -s -w "\n%{http_code}" -o summary_response.txt http://localhost:8126/test/trace_check/summary) + set -e + SUMMARY_RESPONSE_CODE=$(echo "$SUMMARY_RESPONSE" | awk 'END {print $NF}') + + if [[ SUMMARY_RESPONSE_CODE -eq 200 ]]; then + echo "APM Test Agent is running. (HTTP 200)" + else + echo "APM Test Agent is not running and was not used for testing. No checks failed." + exit 0 + fi + + RESPONSE=$(curl -s -w "\n%{http_code}" -o response.txt http://localhost:8126/test/trace_check/failures) + RESPONSE_CODE=$(echo "$RESPONSE" | awk 'END {print $NF}') + + if [[ $RESPONSE_CODE -eq 200 ]]; then + echo "All APM Test Agent Check Traces returned successful! (HTTP 200)" + echo "APM Test Agent Check Traces Summary Results:" + cat summary_response.txt | jq '.' + elif [[ $RESPONSE_CODE -eq 404 ]]; then + echo "Real APM Agent running in place of TestAgent, no checks to validate!" + else + echo "APM Test Agent Check Traces failed with response code: $RESPONSE_CODE" + echo "Failures:" + cat response.txt + echo "APM Test Agent Check Traces Summary Results:" + cat summary_response.txt | jq '.' 
+ exit 1 + fi + + xlarge_tests: + <<: *tests + + docker: + - image: << pipeline.parameters.docker_image >>:{{ docker_image_prefix }}<< parameters.testJvm >> + environment: + - CI_USE_TEST_AGENT=true + - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.11.0 + environment: + - LOG_LEVEL=DEBUG + - TRACE_LANGUAGE=java + - DD_SUPPRESS_TRACE_PARSE_ERRORS=true + - DD_POOL_TRACE_CHECK_FAILURES=true + - DD_DISABLE_ERROR_RESPONSES=true + - ENABLED_CHECKS=trace_content_length,trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service,trace_dd_service + resource_class: xlarge + + + # The only way to do fan-in in CircleCI seems to have a proper job, so let's have one that + # doesn't consume so many resources. The execution time for this including spin up seems to + # be around 6 seconds. + fan_in: + resource_class: small + + docker: + - image: alpine + + parameters: + testJvm: + type: string + default: "all configured JVMs" + stage: + type: string + + steps: + - run: + name: Completed stage << parameters.stage >> on << parameters.testJvm >> passed! + command: echo '<< parameters.stage >> completed!' + + agent_integration_tests: + <<: *tests + + resource_class: medium + + docker: + - image: << pipeline.parameters.docker_image >>:{{ docker_image_prefix }}8 + - image: datadog/agent:7.34.0 + environment: + - DD_APM_ENABLED=true + - DD_BIND_HOST=0.0.0.0 + - DD_API_KEY=invalid_key_but_this_is_fine + + test_published_artifacts: + <<: *defaults + resource_class: medium + docker: + - image: << pipeline.parameters.docker_image >>:{{ docker_image_prefix }}7 + + steps: + - setup_code + - restore_dependency_cache: + cacheType: lib + - restore_build_cache: + cacheType: lib + + - run: + name: Publish Artifacts Locally + command: | + mvn_local_repo=$(./mvnw help:evaluate -Dexpression=settings.localRepository -q -DforceStdout) + rm -rf "${mvn_local_repo}/com/datadoghq" + export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + ./gradlew publishToMavenLocal << pipeline.parameters.gradle_flags >> --max-workers=3 + + - run: + name: Test Published Artifacts + command: | + cd test-published-dependencies + export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx512M -Xms512M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + ./gradlew check --info --max-workers=3 + + - run: + name: Collect Reports + when: on_fail + command: .circleci/collect_reports.sh + + - store_artifacts: + path: ./reports + + - display_memory_usage + muzzle-dep-report: + <<: *defaults + resource_class: medium + steps: + - setup_code + - skip_unless_matching_files_changed: + pattern: "dd-java-agent/instrumentation" + - restore_dependency_cache: + cacheType: inst + - restore_build_cache: + cacheType: inst + - run: + name: Generate muzzle dep report + command: >- + SKIP_BUILDSCAN="true" + ./gradlew generateMuzzleReport muzzleInstrumentationReport + - run: + name: Collect Reports + command: .circleci/collect_muzzle_deps.sh + - store_artifacts: + path: ./reports + + muzzle: + <<: *defaults + resource_class: medium + parallelism: 3 + steps: + - setup_code + + - skip_unless_matching_files_changed: + pattern: "dd-java-agent/instrumentation" + + # We are not running with a separate cache of all muzzle artifacts here because it gets very big and + # ends up taking more time restoring/saving than the actual increase in time it takes just + # downloading the artifacts each time. 
+ # + # Let's at least restore the build cache to have something to start from. + - restore_dependency_cache: + cacheType: inst + - restore_build_cache: + cacheType: inst + + - run: + name: Gather muzzle tasks + command: >- + SKIP_BUILDSCAN="true" + ./gradlew writeMuzzleTasksToFile + << pipeline.parameters.gradle_flags >> + --max-workers=3 + + - run: + name: Verify Muzzle + command: >- + SKIP_BUILDSCAN="true" + GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + ./gradlew `circleci tests split --split-by=timings workspace/build/muzzleTasks | xargs` + << pipeline.parameters.gradle_flags >> + --max-workers=4 + + - run: + name: Collect Reports + when: on_fail + command: .circleci/collect_reports.sh + + - store_artifacts: + path: ./reports + + - store_test_results: + path: workspace/build/muzzle-test-results + + - display_memory_usage + + system-tests: + machine: + # https://support.circleci.com/hc/en-us/articles/360007324514-How-can-I-use-Docker-volume-mounting-on-CircleCI- + image: ubuntu-2004:current + resource_class: medium + parameters: + weblog-variant: + type: string + steps: + - setup_system_tests + + - run: + name: Copy jar file to system test binaries folder + command: | + ls -la ~/dd-trace-java/workspace/dd-java-agent/build/libs + cp ~/dd-trace-java/workspace/dd-java-agent/build/libs/*.jar system-tests/binaries/ + + - run: + name: Build + command: | + cd system-tests + ./build.sh java --weblog-variant << parameters.weblog-variant >> + + - run: + name: Run + command: | + cd system-tests + DD_API_KEY=$SYSTEM_TESTS_DD_API_KEY ./run.sh + + - run: + name: Run APM E2E default tests + # Stop the job after 5m to avoid excessive overhead. Will need adjustment as more tests are added. + no_output_timeout: 5m + command: | + cd system-tests + DD_SITE=datadoghq.com DD_API_KEY=$SYSTEM_TESTS_E2E_DD_API_KEY DD_APPLICATION_KEY=$SYSTEM_TESTS_E2E_DD_APP_KEY ./run.sh APM_TRACING_E2E + + - run: + name: Run APM E2E Single Span tests + # Stop the job after 5m to avoid excessive overhead. Will need adjustment as more tests are added. 
+ no_output_timeout: 5m + command: | + cd system-tests + DD_SITE=datadoghq.com DD_API_KEY=$SYSTEM_TESTS_E2E_DD_API_KEY DD_APPLICATION_KEY=$SYSTEM_TESTS_E2E_DD_APP_KEY ./run.sh APM_TRACING_E2E_SINGLE_SPAN + + - run: + name: Upload data to CI Visibility + command: | + cd system-tests + export DD_API_KEY=$SYSTEM_TESTS_CI_API_KEY + export DD_APP_KEY=$SYSTEM_TESTS_CI_APP_KEY + + # Causes conflicts with DD_API_KEY and datadog-ci tool + unset DATADOG_API_KEY + + echo "Uploading tests results to CI Visibility" + utils/scripts/upload_results_CI_visibility.sh dev java-tracer << pipeline.id >>-<< pipeline.number >> + + if [[ $CIRCLE_BRANCH == "master" ]]; then + echo "Updating dashboard from dd-trace-java main branch" + utils/scripts/update_dashboard_CI_visibility.sh java-tracer << pipeline.id >>-<< pipeline.number >> + else + echo "Skipping CI Visibility dashboard update due to it is not a main branch" + fi + + - run: + name: Collect artifacts + command: tar -cvzf logs_java_<< parameters.weblog-variant >>_dev.tar.gz -C system-tests logs logs_apm_tracing_e2e logs_apm_tracing_e2e_single_span + + - store_artifacts: + path: logs_java_<< parameters.weblog-variant >>_dev.tar.gz + + integrations-system-tests: + machine: + # https://support.circleci.com/hc/en-us/articles/360007324514-How-can-I-use-Docker-volume-mounting-on-CircleCI- + image: ubuntu-2004:current + resource_class: medium + steps: + - setup_system_tests + + - run: + name: Copy jar file to system test binaries folder + command: | + ls -la ~/dd-trace-java/workspace/dd-java-agent/build/libs + cp ~/dd-trace-java/workspace/dd-java-agent/build/libs/*.jar system-tests/binaries/ + + - run: + name: Build + # Build the default framework, which is springboot + command: | + cd system-tests + ./build.sh java + + - run: + name: Run APM Integrations tests + # Stop the job after 5m to avoid excessive overhead. Will need adjustment as more tests are added. 
+ no_output_timeout: 5m + command: | + cd system-tests + DD_SITE=datadoghq.com DD_API_KEY=$SYSTEM_TESTS_E2E_DD_API_KEY DD_APPLICATION_KEY=$SYSTEM_TESTS_E2E_DD_APP_KEY ./run.sh INTEGRATIONS + + - store_test_results: + path: system-tests/logs_integrations + + - store_artifacts: + path: system-tests/logs_integrations + + parametric-tests: + machine: + # https://support.circleci.com/hc/en-us/articles/360007324514-How-can-I-use-Docker-volume-mounting-on-CircleCI- + image: ubuntu-2004:current + resource_class: large + steps: + - setup_system_tests + + - run: + name: Copy jar files to system test binaries folder + command: | + ls -la ~/dd-trace-java/workspace/dd-trace-api/build/libs + ls -la ~/dd-trace-java/workspace/dd-java-agent/build/libs + cp ~/dd-trace-java/workspace/dd-trace-api/build/libs/*.jar system-tests/binaries/ + cp ~/dd-trace-java/workspace/dd-java-agent/build/libs/*.jar system-tests/binaries/ + + - run: + name: Install requirements + command: | + cd system-tests + pyenv local system + python3.9 --version + python3.9 -m pip install wheel + python3.9 -m pip install -r requirements.txt + sudo ln -sf /usr/bin/python3.9 /usr/bin/python + + - run: + name: Run + command: | + set -e + cd system-tests + export TEST_LIBRARY=java + export PYTEST_WORKER_COUNT=8 + ./build.sh -i runner + ./run.sh PARAMETRIC --log-cli-level=DEBUG --durations=30 -vv + + - store_test_results: + path: system-tests/logs_parametric + + - run: + name: Collect artifacts + command: tar -cvzf logs_java_parametric_dev.tar.gz -C system-tests logs_parametric + + - store_artifacts: + path: logs_java_parametric_dev.tar.gz + + +build_test_jobs: &build_test_jobs + - build: + name: build_lib + gradleTarget: shadowJar + cacheType: lib + collectLibs: true + - build: + name: build_base + gradleTarget: :baseTest + cacheType: base + - build: + name: build_inst + gradleTarget: :instrumentationTest + cacheType: inst + - build: + name: build_latestdep + gradleTarget: :instrumentationLatestDepTest + cacheType: latestdep + - build: + name: build_smoke + gradleTarget: :smokeTest + cacheType: smoke + - build: + name: build_profiling + gradleTarget: :profilingTest + cacheType: profiling + - spotless + + - fan_in: + requires: + - build_lib + - build_base + - build_inst + - build_smoke + - build_profiling + - spotless + name: ok_to_test + stage: ok_to_test + + - check: + requires: + - ok_to_test + name: check_base + gradleTarget: ":baseCheck" + cacheType: base + + - check: + requires: + - ok_to_test + name: check_inst + parallelism: 4 + gradleTarget: ":instrumentationCheck" + cacheType: inst + + - check: + requires: + - ok_to_test + name: check_smoke + gradleTarget: ":smokeCheck" + cacheType: smoke + + - check: + requires: + - ok_to_test + name: check_profiling + gradleTarget: ":profilingCheck" + cacheType: profiling + + - fan_in: + requires: + - check_base + - check_inst + - check_smoke + - check_profiling + name: check + stage: check + + - tests: + requires: + - ok_to_test + name: z_test_<< matrix.testJvm >>_base + triggeredBy: *core_modules + gradleTarget: ":baseTest" + gradleParameters: "-PskipFlakyTests -PskipInstTests -PskipSmokeTests -PskipProfilingTests" + stage: core + cacheType: base + parallelism: 4 + maxWorkers: 4 + matrix: + <<: *test_matrix + + - tests: + requires: + - ok_to_test + name: z_test_8_base + triggeredBy: *core_modules + gradleTarget: :baseTest jacocoTestReport jacocoTestCoverageVerification + gradleParameters: "-PskipFlakyTests -PskipInstTests -PskipSmokeTests -PskipProfilingTests" + stage: core + cacheType: base + 
parallelism: 4 + maxWorkers: 4 + testJvm: "8" + + - xlarge_tests: + requires: + - ok_to_test + name: z_test_<< matrix.testJvm >>_inst + gradleTarget: ":instrumentationTest" + gradleParameters: "-PskipFlakyTests" + triggeredBy: *instrumentation_modules + stage: instrumentation + cacheType: inst + parallelism: 4 + maxWorkers: 4 + matrix: + <<: *test_matrix + + - xlarge_tests: + requires: + - ok_to_test + name: z_test_8_inst + gradleTarget: ":instrumentationTest" + gradleParameters: "-PskipFlakyTests" + triggeredBy: *instrumentation_modules + stage: instrumentation + cacheType: inst + parallelism: 4 + maxWorkers: 4 + testJvm: "8" + + - xlarge_tests: + requires: + - ok_to_test + - build_latestdep + name: test_8_inst_latest + gradleTarget: ":instrumentationLatestDepTest" + gradleParameters: "-PskipFlakyTests" + triggeredBy: *instrumentation_modules + stage: instrumentation + cacheType: latestdep + parallelism: 4 + maxWorkers: 4 + testJvm: "8" + +{% if flaky %} + - tests: + requires: + - ok_to_test + name: z_test_8_flaky_base + gradleTarget: ":baseTest" + gradleParameters: "-PrunFlakyTests" + continueOnFailure: true + triggeredBy: *core_modules + stage: core + cacheType: base + parallelism: 4 + maxWorkers: 4 + testJvm: "8" + + - xlarge_tests: + requires: + - ok_to_test + name: z_test_8_flaky_inst + gradleTarget: ":instrumentationTest" + gradleParameters: "-PrunFlakyTests" + continueOnFailure: true + triggeredBy: *instrumentation_modules + stage: instrumentation + cacheType: inst + parallelism: 2 + maxWorkers: 4 + testJvm: "8" + + - tests: + requires: + - ok_to_test + name: z_test_8_flaky_smoke + gradleTarget: ":smokeTest" + gradleParameters: "-PrunFlakyTests" + continueOnFailure: true + stage: smoke + cacheType: smoke + parallelism: 4 + maxWorkers: 4 + testJvm: "8" +{% endif %} + + - tests: + requires: + - ok_to_test + maxWorkers: 4 + gradleTarget: ":profilingTest" + gradleParameters: "-PskipFlakyTests" + triggeredBy: *profiling_modules + stage: profiling + cacheType: profiling + name: test_<< matrix.testJvm >>_profiling + matrix: + <<: *profiling_test_matrix + + - tests: + requires: + - ok_to_test + name: test_<< matrix.testJvm >>_debugger + maxWorkers: 4 + gradleTarget: ":debuggerTest" + gradleParameters: "-PskipFlakyTests" + triggeredBy: *debugger_modules + stage: debugger + cacheType: base + matrix: + <<: *debugger_test_matrix + + - tests: + requires: + - ok_to_test + name: z_test_<< matrix.testJvm >>_smoke + gradleTarget: "stageMainDist :smokeTest" + gradleParameters: "-PskipFlakyTests" + stage: smoke + cacheType: smoke + parallelism: 4 + maxWorkers: 3 + matrix: + <<: *test_matrix + + - tests: + requires: + - ok_to_test + name: test_semeru8_debugger_smoke + maxWorkers: 4 + gradleTarget: "stageMainDist dd-smoke-tests:debugger-integration-tests:test" + gradleParameters: "-PskipFlakyTests" + triggeredBy: *debugger_modules + stage: debugger + cacheType: smoke + testJvm: "semeru8" + + - tests: + requires: + - ok_to_test + name: test_graalvm17_smoke + gradleTarget: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" + stage: smoke + cacheType: smoke + testJvm: "graalvm17" + + - tests: + requires: + - ok_to_test + name: z_test_8_smoke + gradleTarget: "stageMainDist :smokeTest" + gradleParameters: "-PskipFlakyTests" + stage: smoke + cacheType: smoke + parallelism: 4 + maxWorkers: 3 + testJvm: "8" + + - fan_in: + requires: + - z_test_<< matrix.testJvm >>_base + - z_test_<< matrix.testJvm >>_inst + - z_test_<< matrix.testJvm >>_smoke + name: test_<< matrix.testJvm >> + stage: tracing + matrix: 
+ <<: *test_matrix + + - fan_in: + requires: + - z_test_8_base + - z_test_8_inst + - z_test_8_smoke + name: test_8 + stage: tracing + testJvm: "8" + + - agent_integration_tests: + requires: + - ok_to_test + triggeredBy: *agent_integration_tests_modules + gradleTarget: traceAgentTest + cacheType: base + testJvm: "8" + + - test_published_artifacts: + requires: + - ok_to_test + + - muzzle: + requires: + - ok_to_test + filters: + branches: + ignore: + - master + - project/* + - release/* + + - muzzle-dep-report: + requires: + - ok_to_test + + - system-tests: + requires: + - ok_to_test + matrix: + <<: *system_test_matrix + + - integrations-system-tests: + requires: + - ok_to_test + + - parametric-tests: + requires: + - ok_to_test + + - fan_in: + requires: + - test_published_artifacts +{% for jdk in all_jdks %} + - "test_{{ jdk }}_profiling" +{% endfor %} + name: profiling + stage: profiling + + - fan_in: + requires: + - test_published_artifacts +{% for jdk in all_jdks %} + - "test_{{ jdk }}_debugger" +{% endfor %} + name: debugger + stage: debugger + + # This job requires all the jobs needed for a successful build, so GitHub only needs to enforce this one, + # and it will be simpler to require different JVM versions for different branches and old releases + - fan_in: + requires: + - check + - test_published_artifacts + - agent_integration_tests +{% for jdk in all_jdks %} + - "test_{{ jdk }}" +{% endfor %} + - profiling + - debugger + name: required + stage: required + +workflows: +{% if is_regular %} + build_test: + jobs: + *build_test_jobs +{% endif %} +{% if is_nightly %} + nightly: + jobs: + *build_test_jobs +{% endif %} +{% if is_weekly %} + weekly: + jobs: + # This will rebuild a main caches with a new timestamp from a clean slate + - build_clean_cache: + name: build_cache_lib + gradleTarget: shadowJar + cacheType: lib + collectLibs: false + - build_clean_cache: + name: build_cache_base + gradleTarget: :baseTest + cacheType: base + - build_clean_cache: + name: build_cache_inst + gradleTarget: :instrumentationTest + cacheType: inst + - build_clean_cache: + name: build_cache_latestdep + gradleTarget: :instrumentationLatestDepTest + cacheType: latestdep + - build_clean_cache: + name: build_cache_smoke + gradleTarget: :smokeTest + cacheType: smoke + - build_clean_cache: + name: build_cache_profiling + gradleTarget: :profilingTest + cacheType: profiling +{% endif %} diff --git a/.circleci/config.yml b/.circleci/config.yml index f37f89f7d33..ba038c80090 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,1340 +1,37 @@ version: 2.1 - -defaults: &defaults - working_directory: ~/dd-trace-java - docker: - - image: &default_container << pipeline.parameters.docker_image >>:<< pipeline.parameters.docker_image_tag >> - -test_matrix: &test_matrix - parameters: - testJvm: [ "ibm8", "semeru8", "zulu8", "oracle8", "11", "zulu11", "17", "ubuntu17" ] - -profiling_test_matrix: &profiling_test_matrix - parameters: - testJvm: [ "8", "zulu8", "oracle8", "11", "zulu11", "17", "ubuntu17" ] - -system_test_matrix: &system_test_matrix - parameters: - weblog-variant: [ 'spring-boot', 'spring-boot-jetty', 'spring-boot-openliberty', 'spring-boot-3-native', 'jersey-grizzly2', 'resteasy-netty3','ratpack', 'vertx3' ] - -agent_integration_tests_modules: &agent_integration_tests_modules "dd-trace-core|communication|internal-api|utils" -core_modules: &core_modules 
"dd-java-agent|dd-trace-core|communication|internal-api|telemetry|utils|dd-java-agent/agent-bootstrap|dd-java-agent/agent-installer|dd-java-agent/agent-tooling|dd-java-agent/agent-builder|dd-java-agent/appsec|dd-java-agent/agent-crashtracking" -instrumentation_modules: &instrumentation_modules "dd-java-agent/instrumentation|dd-java-agent/agent-tooling|dd-java-agent/agent-installer|dd-java-agent/agent-builder|dd-java-agent/agent-bootstrap|dd-java-agent/appsec|dd-java-agent/testing|dd-trace-core|dd-trace-api|internal-api" -debugger_modules: &debugger_modules "dd-java-agent/agent-debugger|dd-java-agent/agent-bootstrap|dd-java-agent/agent-builder|internal-api|communication|dd-trace-core" -profiling_modules: &profiling_modules "dd-java-agent/agent-profiling" - -default_system_tests_commit: &default_system_tests_commit 18afda441d6e92fe3befeddd4dd20ff558ca4f0c - +setup: true +python310_image: &python310_image cimg/python:3.10 parameters: - gradle_flags: - # Using no-daemon is important for the caches to be in a consistent state - type: string - default: "--stacktrace --no-daemon" - - global_pattern: - # Pattern for files that should always trigger a test jobs - type: string - default: "^build.gradle$|^settings.gradle$|^gradle.properties$|^buildSrc/|^gradle/|.circleci" - - docker_image: - type: string - default: ghcr.io/datadog/dd-trace-java-docker-build - - docker_image_tag: - type: string - default: base - -commands: - check_for_leftover_files: - steps: - - run: - name: Check for leftover files - command: | - LEFTOVER_FILES=$(find . -type f -regex '.*\.orig$') - if [[ "$LEFTOVER_FILES" != "" ]] - then - echo -e "Found leftover files in the commit:\n$LEFTOVER_FILES" - exit 1 - fi - - generate_cache_ids: - steps: - - run: - name: Generate cache ids - command: | - # Everything falls back to the main cache - BASE_CACHE_ID="main" - if [ "$CIRCLE_BRANCH" == "master" ]; - then - # If we're on a the main branch, then they are the same - echo "${BASE_CACHE_ID}" >| _circle_ci_cache_id - else - # If we're on a PR branch, then we use the name of the branch and the - # PR number as a stable identifier for the branch cache - echo "${CIRCLE_BRANCH}-${CIRCLE_PULL_REQUEST##*/}" >| _circle_ci_cache_id - fi - # Have new branches start from the main cache - echo "${BASE_CACHE_ID}" >| _circle_ci_cache_base_id - - setup_code: - steps: - - checkout - - run: - name: Checkout merge commit - command: | - CCI_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}" - - if [[ "$CIRCLE_BRANCH" != "master" && -n "${CCI_PR_NUMBER}" ]] - then - FETCH_REFS="${FETCH_REFS} +refs/pull/${CCI_PR_NUMBER}/merge:refs/pull/${CCI_PR_NUMBER}/merge +refs/pull/${CCI_PR_NUMBER}/head:refs/pull/${CCI_PR_NUMBER}/head" - git fetch -u origin ${FETCH_REFS} - - if git merge-base --is-ancestor $(git show-ref --hash refs/pull/${CCI_PR_NUMBER}/head) $(git show-ref --hash refs/pull/${CCI_PR_NUMBER}/merge); then - git checkout "pull/${CCI_PR_NUMBER}/merge" - else - echo "[WARN] There is a merge conflict between master and PR ${CCI_PR_NUMBER}, merge branch cannot be checked out." - git checkout "pull/${CCI_PR_NUMBER}/head" - fi - fi - - - check_for_leftover_files - - - generate_cache_ids - - setup_testcontainers: - description: >- - Sets up remote docker and automatic port forwarding needed for docker on docker - version of Testcontainers. - steps: - - setup_remote_docker: - version: 20.10.18 - # DLC shares Docker layers across jobs (at an extra cost). - # But its time to setup (~1min) exceeds the time required to prefetch all images we use. 
- docker_layer_caching: false - - - run: - name: Prepare testcontainers environment - command: .circleci/prepare_docker_env.sh - - - run: - name: Testcontainers tunnels - background: true - command: .circleci/start_docker_autoforward.sh - - - run: - name: Prefetch Docker images - background: true - command: .circleci/fetch_docker_images.sh - - early_return_for_forked_pull_requests: - description: >- - If this build is from a fork, stop executing the current job and return success. - This is useful to avoid steps that will fail due to missing credentials. - steps: - - run: - name: Early return if this build is from a forked PR - command: | - if [[ "$CIRCLE_BRANCH" != "master" && -n "$CIRCLE_PR_NUMBER" ]]; then - echo "Nothing to do for forked PRs, so marking this step successful" - circleci step halt - fi - - skip_unless_matching_files_changed: - description: >- - If files matching the regular expression haven't changed in the commit, then skip the job - parameters: - pattern: - type: string - steps: - - run: - name: "Check if files relevant to job have changed" - command: | - CCI_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}" - - if [[ "$CIRCLE_BRANCH" != "master" && -n "$CCI_PR_NUMBER" ]]; then - BRANCH="$(git rev-parse --abbrev-ref HEAD)" - if [[ "$BRANCH" != "master" ]] && [[ "$BRANCH" != "release/*" ]]; then - # We know that we have checked out the PR merge branch, so the HEAD commit is a merge - # As a backup, if anything goes wrong with the diff, the build will fail - CHANGED_FILES=$(git show HEAD | grep -e "^Merge:" | cut -d ' ' -f 2- | sed 's/ /.../' | xargs git diff --name-only) - # Count the number of matches, and ignore if the grep doesn't match anything - MATCH_COUNT=$(echo "$CHANGED_FILES" | grep -c -E "<< pipeline.parameters.global_pattern >>|<< parameters.pattern >>") || true - if [[ "$MATCH_COUNT" -eq "0" ]]; then - circleci step halt - fi - fi - fi - - display_memory_usage: - steps: - - run: - name: Max Memory Used - # The file does not seem to exist when DLC is disabled - command: cat /sys/fs/cgroup/memory/memory.max_usage_in_bytes || true - when: always - - - # The caching setup of the build dependencies is somewhat involved because of how CircleCI works. - # 1) Caches are immutable, so you can not reuse a cache key (the save will simply be ignored) - # 2) Cache keys are prefix matched, and the most recently updated cache that matches will be picked - # - # There is a weekly job that runs on Monday mornings that builds a new cache from scratch. 
- restore_dependency_cache: - parameters: - cacheType: - type: string - steps: - - restore_cache: - keys: - # Dependent steps will find this cache - - dd-trace-java-dep<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} - # New branch commits will find this cache - - dd-trace-java-dep<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}- - # New branches fall back on main build caches - - dd-trace-java-dep<< parameters.cacheType >>-v4-master-{{ checksum "_circle_ci_cache_base_id" }}- - # Fallback to the previous cache during transition - - dd-trace-java-dep<< parameters.cacheType >>-v3-master-{{ checksum "_circle_ci_cache_base_id" }}- - - save_dependency_cache: - parameters: - cacheType: - type: string - steps: - - save_cache: - key: dd-trace-java-dep<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} - paths: - # Cached dependencies and wrappers for gradle - - ~/.gradle/caches - - ~/.gradle/wrapper - # Cached dependencies for maven - - ~/.m2 - # Cached launchers and compilers for sbt - - ~/.sbt - # Cached dependencies for sbt handled by ivy - - ~/.ivy2 - # Cached dependencies for sbt handled by coursier - - ~/.cache/coursier - - restore_build_cache: - parameters: - cacheType: - type: string - steps: - - restore_cache: - keys: - # Dependent steps will find this cache - - dd-trace-java-build<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} - - save_build_cache: - parameters: - cacheType: - type: string - steps: - - save_cache: - key: dd-trace-java-build<< parameters.cacheType >>-v4-{{ .Branch }}-{{ checksum "_circle_ci_cache_id" }}-{{ .Revision }} - paths: - # Gradle version specific cache for incremental builds. 
Needs to match version in - # gradle/wrapper/gradle-wrapper.properties - - ~/.gradle/caches/8.2 - # Workspace - - ~/dd-trace-java/.gradle - - ~/dd-trace-java/workspace - - setup_system_tests: - parameters: - systemTestsCommit: - type: string - default: *default_system_tests_commit - steps: - - generate_cache_ids - - - restore_build_cache: - cacheType: lib - - - run: - # TODO: removes this step once host-in-runner is merged on system tests - name: Install good version of docker-compose - command: | - sudo curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose - sudo chmod +x /usr/local/bin/docker-compose - - - run: - name: Install python 3.9 - command: | - sudo apt-get install python3.9-full python3.9-dev python3.9-venv - echo 'export PATH="$HOME/.local/bin:$PATH"' >>"$BASH_ENV" - - - run: - name: versions - command: | - docker --version - docker-compose --version - - - run: - name: Clone system-tests - command: | - git init system-tests - cd system-tests - git remote add origin https://github.com/DataDog/system-tests.git - git fetch origin << parameters.systemTestsCommit >> - git reset --hard FETCH_HEAD - -jobs: - build: - <<: *defaults - resource_class: xlarge - - parameters: - gradleTarget: - type: string - cacheType: - type: string - collectLibs: - type: boolean - default: false - - steps: - - setup_code - - - restore_dependency_cache: - cacheType: << parameters.cacheType >> - - - run: - name: Build Project - command: >- - MAVEN_OPTS="-Xms64M -Xmx256M" - GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew clean - << parameters.gradleTarget >> - -PskipTests - << pipeline.parameters.gradle_flags >> - --max-workers=8 - --rerun-tasks - - - when: - condition: - equal: [ true, << parameters.collectLibs >> ] - steps: - - run: - name: Collect Libs - when: always - command: .circleci/collect_libs.sh - - store_artifacts: - path: ./libs - - - run: - name: Collect reports - when: on_fail - command: .circleci/collect_reports.sh --destination ./check_reports --move - - - run: - name: Delete reports - when: on_success - command: .circleci/collect_reports.sh --destination ./check_reports --delete - - - store_artifacts: - path: ./check_reports - - # Save a full dependency cache when building on master or a base project branch. - # We used to do this on the first build of each PR, but now it's skipped at the - # cost of downloading new dependencies a few more times. 
- - when: - condition: - matches: - pattern: "^(master|project/.+)$" - value: << pipeline.git.branch >> - steps: - - save_dependency_cache: - cacheType: << parameters.cacheType >> - - # Save the small build cache - - save_build_cache: - cacheType: << parameters.cacheType >> - - - display_memory_usage - - spotless: - <<: *defaults - resource_class: medium+ - - steps: - - setup_code - - - run: - name: Run spotless - command: >- - JAVA_HOME=$JAVA_11_HOME - ./gradlew spotlessCheck - << pipeline.parameters.gradle_flags >> - --max-workers=8 - - check: - <<: *defaults - - parameters: - parallelism: - type: integer - default: 1 - gradleTarget: - type: string - cacheType: - type: string - - resource_class: medium+ - - parallelism: << parameters.parallelism >> - - steps: - - setup_code - - restore_dependency_cache: - cacheType: << parameters.cacheType >> - - restore_build_cache: - cacheType: << parameters.cacheType >> - - - run: - name: Check Project - command: >- - MAVEN_OPTS="-Xms64M -Xmx256M" - GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew - << parameters.gradleTarget >> - -PskipTests - -PrunBuildSrcTests - -PtaskPartitionCount=${CIRCLE_NODE_TOTAL} -PtaskPartition=${CIRCLE_NODE_INDEX} - << pipeline.parameters.gradle_flags >> - --max-workers=8 - - - run: - name: Collect reports - when: on_fail - command: .circleci/collect_reports.sh --destination ./check_reports --move - - - run: - name: Delete reports - when: on_success - command: .circleci/collect_reports.sh --destination ./check_reports --delete - - - store_artifacts: - path: ./check_reports - - - run: - name: Cancel workflow - when: on_fail - command: .circleci/cancel_workflow.sh - - build_clean_cache: - <<: *defaults - - parameters: - gradleTarget: - type: string - cacheType: - type: string - collectLibs: - type: boolean - default: false - - resource_class: xlarge - - steps: - - setup_code - - - run: - name: Build Project - command: >- - MAVEN_OPTS="-Xms64M -Xmx256M" - GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew clean - << parameters.gradleTarget >> - -PskipTests - << pipeline.parameters.gradle_flags >> - --max-workers=8 - --rerun-tasks - - - when: - condition: - not: - equal: [true, << parameters.collectLibs >>] - steps: - - run: - name: Collect Libs - when: always - command: .circleci/collect_libs.sh - - store_artifacts: - path: ./libs - - - save_dependency_cache: - cacheType: << parameters.cacheType >> - - - display_memory_usage - - tests: &tests - <<: *defaults - # since tests use test containers, they will use a Linux VM / Remote Docker executor, so there is no medium+ size - resource_class: large - - docker: - - image: << pipeline.parameters.docker_image >>:<< parameters.testJvm >> - - parameters: - testJvm: - type: string - default: "" - maxDaemonHeapSize: - type: string - default: "2G" - gradleParameters: - type: string - default: "" - gradleTarget: - type: string - triggeredBy: - type: string - default: ".*" - stage: - type: string - default: "" - parallelism: - type: integer - default: 1 - maxWorkers: - type: integer - default: 2 - profile: - type: boolean - default: false - continueOnFailure: - type: boolean - default: false - cacheType: - type: string - - parallelism: << parameters.parallelism >> - - steps: - - setup_code - - - skip_unless_matching_files_changed: - pattern: << parameters.triggeredBy >> - - - 
restore_dependency_cache: - cacheType: << parameters.cacheType >> - - restore_build_cache: - cacheType: << parameters.cacheType >> - - - when: - condition: - or: - - equal: ["core", << parameters.stage >>] - - equal: ["instrumentation", << parameters.stage >>] - - equal: ["smoke", << parameters.stage >>] - steps: - - setup_testcontainers - - - run: - name: Run tests - command: >- - if [[ << parameters.profile >> ]] && [[ << parameters.testJvm >> != "ibm8" ]] && [[ << parameters.testJvm >> != "oracle8" ]]; - then - PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/<< parameters.stage >>-<< parameters.testJvm >>.jfr,dumponexit=true" - fi - - MAVEN_OPTS="-Xms64M -Xmx512M" - GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms<< parameters.maxDaemonHeapSize >> -Xmx<< parameters.maxDaemonHeapSize >> $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" - ./gradlew - << parameters.gradleTarget >> - << parameters.gradleParameters >> - -PtaskPartitionCount=${CIRCLE_NODE_TOTAL} -PtaskPartition=${CIRCLE_NODE_INDEX} - <<# parameters.testJvm >>-PtestJvm=<< parameters.testJvm >><> - << pipeline.parameters.gradle_flags >> - --max-workers=<< parameters.maxWorkers >> - --continue - <<# parameters.continueOnFailure >> || true <> - - - run: - name: Collect reports - when: on_fail - command: .circleci/collect_reports.sh - - - store_artifacts: - path: ./reports.tar - - - when: - condition: - equal: [true, << parameters.profile >>] - steps: - - run: - name: Collect profiles - when: always - command: .circleci/collect_profiles.sh - - - store_artifacts: - path: ./profiles.tar - - - run: - name: Collect test results - when: always - command: .circleci/collect_results.sh - - - store_test_results: - path: ./results - - - display_memory_usage - - - early_return_for_forked_pull_requests - - - run: - name: Upload test results to Datadog - when: always - command: .circleci/upload_ciapp.sh << parameters.stage >> << parameters.testJvm >> || true - - - run: - name: Get APM Test Agent Trace Check Results - when: always - command: | - set +e # Disable exiting from testagent response failure - SUMMARY_RESPONSE=$(curl -s -w "\n%{http_code}" -o summary_response.txt http://localhost:8126/test/trace_check/summary) - set -e - SUMMARY_RESPONSE_CODE=$(echo "$SUMMARY_RESPONSE" | awk 'END {print $NF}') - - if [[ SUMMARY_RESPONSE_CODE -eq 200 ]]; then - echo "APM Test Agent is running. (HTTP 200)" - else - echo "APM Test Agent is not running and was not used for testing. No checks failed." - exit 0 - fi - - RESPONSE=$(curl -s -w "\n%{http_code}" -o response.txt http://localhost:8126/test/trace_check/failures) - RESPONSE_CODE=$(echo "$RESPONSE" | awk 'END {print $NF}') - - if [[ $RESPONSE_CODE -eq 200 ]]; then - echo "All APM Test Agent Check Traces returned successful! (HTTP 200)" - echo "APM Test Agent Check Traces Summary Results:" - cat summary_response.txt | jq '.' - elif [[ $RESPONSE_CODE -eq 404 ]]; then - echo "Real APM Agent running in place of TestAgent, no checks to validate!" - else - echo "APM Test Agent Check Traces failed with response code: $RESPONSE_CODE" - echo "Failures:" - cat response.txt - echo "APM Test Agent Check Traces Summary Results:" - cat summary_response.txt | jq '.' 
- exit 1 - fi - - xlarge_tests: - <<: *tests - + nightly: + type: boolean + default: false + weekly: + type: boolean + default: false +orbs: + continuation: circleci/continuation@0.1.2 +executors: + python310: docker: - - image: << pipeline.parameters.docker_image >>:<< parameters.testJvm >> - environment: - - CI_USE_TEST_AGENT=true - - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.11.0 - environment: - - LOG_LEVEL=DEBUG - - TRACE_LANGUAGE=java - - DD_SUPPRESS_TRACE_PARSE_ERRORS=true - - DD_POOL_TRACE_CHECK_FAILURES=true - - DD_DISABLE_ERROR_RESPONSES=true - - ENABLED_CHECKS=trace_content_length,trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service,trace_dd_service - resource_class: xlarge - - - # The only way to do fan-in in CircleCI seems to have a proper job, so let's have one that - # doesn't consume so many resources. The execution time for this including spin up seems to - # be around 6 seconds. - fan_in: + - image: *python310_image resource_class: small - - docker: - - image: alpine - - parameters: - testJvm: - type: string - default: "all configured JVMs" - stage: - type: string - - steps: - - run: - name: Completed stage << parameters.stage >> on << parameters.testJvm >> passed! - command: echo '<< parameters.stage >> completed!' - - agent_integration_tests: - <<: *tests - - resource_class: medium - - docker: - - image: << pipeline.parameters.docker_image >>:7 - - image: datadog/agent:7.34.0 - environment: - - DD_APM_ENABLED=true - - DD_BIND_HOST=0.0.0.0 - - DD_API_KEY=invalid_key_but_this_is_fine - - test_published_artifacts: - <<: *defaults - resource_class: medium - docker: - - image: << pipeline.parameters.docker_image >>:7 - - steps: - - setup_code - - restore_dependency_cache: - cacheType: lib - - restore_build_cache: - cacheType: lib - - - run: - name: Publish Artifacts Locally - command: | - mvn_local_repo=$(./mvnw help:evaluate -Dexpression=settings.localRepository -q -DforceStdout) - rm -rf "${mvn_local_repo}/com/datadoghq" - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew publishToMavenLocal << pipeline.parameters.gradle_flags >> --max-workers=3 - - - run: - name: Test Published Artifacts - command: | - cd test-published-dependencies - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx512M -Xms512M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew check --info --max-workers=3 - - - run: - name: Collect Reports - when: on_fail - command: .circleci/collect_reports.sh - - - store_artifacts: - path: ./reports - - - display_memory_usage - - muzzle: - <<: *defaults - resource_class: medium - parallelism: 3 +jobs: + setup: + executor: python310 steps: - - setup_code - - - skip_unless_matching_files_changed: - pattern: "dd-java-agent/instrumentation" - - # We are not running with a separate cache of all muzzle artifacts here because it gets very big and - # ends up taking more time restoring/saving than the actual increase in time it takes just - # downloading the artifacts each time. - # - # Let's at least restore the build cache to have something to start from. 
- - restore_dependency_cache: - cacheType: inst - - restore_build_cache: - cacheType: inst - + - checkout - run: - name: Gather muzzle tasks - command: >- - SKIP_BUILDSCAN="true" - ./gradlew writeMuzzleTasksToFile - << pipeline.parameters.gradle_flags >> - --max-workers=3 - + name: Install dependencies + command: pip3 install jinja2 requests - run: - name: Verify Muzzle + name: Generate config command: >- - SKIP_BUILDSCAN="true" - GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew `circleci tests split --split-by=timings workspace/build/muzzleTasks | xargs` - << pipeline.parameters.gradle_flags >> - --max-workers=4 - - - run: - name: Collect Reports - when: on_fail - command: .circleci/collect_reports.sh - - - store_artifacts: - path: ./reports - - - store_test_results: - path: workspace/build/muzzle-test-results - - - display_memory_usage - - system-tests: - machine: - # https://support.circleci.com/hc/en-us/articles/360007324514-How-can-I-use-Docker-volume-mounting-on-CircleCI- - image: ubuntu-2004:current - resource_class: medium - parameters: - weblog-variant: - type: string - steps: - - setup_system_tests - - - run: - name: Copy jar file to system test binaries folder - command: | - ls -la ~/dd-trace-java/workspace/dd-java-agent/build/libs - cp ~/dd-trace-java/workspace/dd-java-agent/build/libs/*.jar system-tests/binaries/ - - - run: - name: Build - command: | - cd system-tests - ./build.sh java --weblog-variant << parameters.weblog-variant >> - - - run: - name: Run - command: | - cd system-tests - DD_API_KEY=$SYSTEM_TESTS_DD_API_KEY ./run.sh - - - run: - name: Run APM E2E default tests - # Stop the job after 5m to avoid excessive overhead. Will need adjustment as more tests are added. - no_output_timeout: 5m - command: | - cd system-tests - DD_SITE=datadoghq.com DD_API_KEY=$SYSTEM_TESTS_E2E_DD_API_KEY DD_APPLICATION_KEY=$SYSTEM_TESTS_E2E_DD_APP_KEY ./run.sh APM_TRACING_E2E - - - run: - name: Run APM E2E Single Span tests - # Stop the job after 5m to avoid excessive overhead. Will need adjustment as more tests are added. 
- no_output_timeout: 5m - command: | - cd system-tests - DD_SITE=datadoghq.com DD_API_KEY=$SYSTEM_TESTS_E2E_DD_API_KEY DD_APPLICATION_KEY=$SYSTEM_TESTS_E2E_DD_APP_KEY ./run.sh APM_TRACING_E2E_SINGLE_SPAN - - - run: - name: Upload data to CI Visibility - command: | - cd system-tests - export DD_API_KEY=$SYSTEM_TESTS_CI_API_KEY - export DD_APP_KEY=$SYSTEM_TESTS_CI_APP_KEY - - # Causes conflicts with DD_API_KEY and datadog-ci tool - unset DATADOG_API_KEY - - echo "Uploading tests results to CI Visibility" - utils/scripts/upload_results_CI_visibility.sh dev java-tracer << pipeline.id >>-<< pipeline.number >> - - if [[ $CIRCLE_BRANCH == "master" ]]; then - echo "Updating dashboard from dd-trace-java main branch" - utils/scripts/update_dashboard_CI_visibility.sh java-tracer << pipeline.id >>-<< pipeline.number >> - else - echo "Skipping CI Visibility dashboard update due to it is not a main branch" - fi - - - run: - name: Collect artifacts - command: tar -cvzf logs_java_<< parameters.weblog-variant >>_dev.tar.gz -C system-tests logs logs_apm_tracing_e2e logs_apm_tracing_e2e_single_span - - - store_artifacts: - path: logs_java_<< parameters.weblog-variant >>_dev.tar.gz - - integrations-system-tests: - machine: - # https://support.circleci.com/hc/en-us/articles/360007324514-How-can-I-use-Docker-volume-mounting-on-CircleCI- - image: ubuntu-2004:current - resource_class: medium - steps: - - setup_system_tests - - - run: - name: Copy jar file to system test binaries folder - command: | - ls -la ~/dd-trace-java/workspace/dd-java-agent/build/libs - cp ~/dd-trace-java/workspace/dd-java-agent/build/libs/*.jar system-tests/binaries/ - - - run: - name: Build - # Build the default framework, which is springboot - command: | - cd system-tests - ./build.sh java - - - run: - name: Run APM Integrations tests - # Stop the job after 5m to avoid excessive overhead. Will need adjustment as more tests are added. 
- no_output_timeout: 5m - command: | - cd system-tests - DD_SITE=datadoghq.com DD_API_KEY=$SYSTEM_TESTS_E2E_DD_API_KEY DD_APPLICATION_KEY=$SYSTEM_TESTS_E2E_DD_APP_KEY ./run.sh INTEGRATIONS - - - store_test_results: - path: system-tests/logs_integrations - - - store_artifacts: - path: system-tests/logs_integrations - - parametric-tests: - machine: - # https://support.circleci.com/hc/en-us/articles/360007324514-How-can-I-use-Docker-volume-mounting-on-CircleCI- - image: ubuntu-2004:current - resource_class: large - steps: - - setup_system_tests - - - run: - name: Copy jar files to system test binaries folder - command: | - ls -la ~/dd-trace-java/workspace/dd-trace-api/build/libs - ls -la ~/dd-trace-java/workspace/dd-java-agent/build/libs - cp ~/dd-trace-java/workspace/dd-trace-api/build/libs/*.jar system-tests/binaries/ - cp ~/dd-trace-java/workspace/dd-java-agent/build/libs/*.jar system-tests/binaries/ - - - run: - name: Install requirements - command: | - cd system-tests - pyenv local system - python3.9 --version - python3.9 -m pip install wheel - python3.9 -m pip install -r requirements.txt - sudo ln -sf /usr/bin/python3.9 /usr/bin/python - - - run: - name: Run - command: | - set -e - cd system-tests - export TEST_LIBRARY=java - export PYTEST_WORKER_COUNT=8 - ./build.sh -i runner - ./run.sh PARAMETRIC --log-cli-level=DEBUG --durations=30 -vv - - - store_test_results: - path: system-tests/logs_parametric - - - run: - name: Collect artifacts - command: tar -cvzf logs_java_parametric_dev.tar.gz -C system-tests logs_parametric - - - store_artifacts: - path: logs_java_parametric_dev.tar.gz - - -build_test_jobs: &build_test_jobs - - build: - name: build_lib - gradleTarget: shadowJar - cacheType: lib - collectLibs: true - - build: - name: build_base - gradleTarget: :baseTest - cacheType: base - - build: - name: build_inst - gradleTarget: :instrumentationTest - cacheType: inst - - build: - name: build_latestdep - gradleTarget: :instrumentationLatestDepTest - cacheType: latestdep - - build: - name: build_smoke - gradleTarget: :smokeTest - cacheType: smoke - - build: - name: build_profiling - gradleTarget: :profilingTest - cacheType: profiling - - spotless - - - fan_in: - requires: - - build_lib - - build_base - - build_inst - - build_smoke - - build_profiling - - spotless - name: ok_to_test - stage: ok_to_test - - - check: - requires: - - ok_to_test - name: check_base - gradleTarget: ":baseCheck" - cacheType: base - - - check: - requires: - - ok_to_test - name: check_inst - parallelism: 4 - gradleTarget: ":instrumentationCheck" - cacheType: inst - - - check: - requires: - - ok_to_test - name: check_smoke - gradleTarget: ":smokeCheck" - cacheType: smoke - - - check: - requires: - - ok_to_test - name: check_profiling - gradleTarget: ":profilingCheck" - cacheType: profiling - - - fan_in: - requires: - - check_base - - check_inst - - check_smoke - - check_profiling - name: check - stage: check - - - tests: - requires: - - ok_to_test - name: z_test_<< matrix.testJvm >>_base - triggeredBy: *core_modules - gradleTarget: ":baseTest" - gradleParameters: "-PskipFlakyTests -PskipInstTests -PskipSmokeTests -PskipProfilingTests" - stage: core - cacheType: base - parallelism: 4 - maxWorkers: 4 - matrix: - <<: *test_matrix - - - tests: - requires: - - ok_to_test - name: z_test_8_base - triggeredBy: *core_modules - gradleTarget: :baseTest jacocoTestReport jacocoTestCoverageVerification - gradleParameters: "-PskipFlakyTests -PskipInstTests -PskipSmokeTests -PskipProfilingTests" - stage: core - cacheType: base - 
parallelism: 4 - maxWorkers: 4 - testJvm: "8" - - - xlarge_tests: - requires: - - ok_to_test - name: z_test_<< matrix.testJvm >>_inst - gradleTarget: ":instrumentationTest" - gradleParameters: "-PskipFlakyTests" - triggeredBy: *instrumentation_modules - stage: instrumentation - cacheType: inst - parallelism: 4 - maxWorkers: 4 - matrix: - <<: *test_matrix - - - xlarge_tests: - requires: - - ok_to_test - name: z_test_8_inst - gradleTarget: ":instrumentationTest" - gradleParameters: "-PskipFlakyTests" - triggeredBy: *instrumentation_modules - stage: instrumentation - cacheType: inst - parallelism: 4 - maxWorkers: 4 - testJvm: "8" - - - xlarge_tests: - requires: - - ok_to_test - - build_latestdep - name: test_8_inst_latest - gradleTarget: ":instrumentationLatestDepTest" - gradleParameters: "-PskipFlakyTests" - triggeredBy: *instrumentation_modules - stage: instrumentation - cacheType: latestdep - parallelism: 4 - maxWorkers: 4 - testJvm: "8" - - - tests: - requires: - - ok_to_test - name: z_test_8_flaky_base - gradleTarget: ":baseTest" - gradleParameters: "-PrunFlakyTests" - continueOnFailure: true - triggeredBy: *core_modules - stage: core - cacheType: base - parallelism: 4 - maxWorkers: 4 - testJvm: "8" - - - xlarge_tests: - requires: - - ok_to_test - name: z_test_8_flaky_inst - gradleTarget: ":instrumentationTest" - gradleParameters: "-PrunFlakyTests" - continueOnFailure: true - triggeredBy: *instrumentation_modules - stage: instrumentation - cacheType: inst - parallelism: 2 - maxWorkers: 4 - testJvm: "8" - - - tests: - requires: - - ok_to_test - name: z_test_8_flaky_smoke - gradleTarget: ":smokeTest" - gradleParameters: "-PrunFlakyTests" - continueOnFailure: true - stage: smoke - cacheType: smoke - parallelism: 4 - maxWorkers: 4 - testJvm: "8" - - - tests: - requires: - - ok_to_test - maxWorkers: 4 - gradleTarget: ":profilingTest" - gradleParameters: "-PskipFlakyTests" - triggeredBy: *profiling_modules - stage: profiling - cacheType: profiling - name: test_<< matrix.testJvm >>_profiling - matrix: - <<: *profiling_test_matrix - - - tests: - requires: - - ok_to_test - name: test_<< matrix.testJvm >>_debugger - maxWorkers: 4 - gradleTarget: ":debuggerTest" - gradleParameters: "-PskipFlakyTests" - triggeredBy: *debugger_modules - stage: debugger - cacheType: base - matrix: - <<: *profiling_test_matrix - - - tests: - requires: - - ok_to_test - name: z_test_<< matrix.testJvm >>_smoke - gradleTarget: "stageMainDist :smokeTest" - gradleParameters: "-PskipFlakyTests" - stage: smoke - cacheType: smoke - parallelism: 4 - maxWorkers: 3 - matrix: - <<: *test_matrix - - - tests: - requires: - - ok_to_test - name: test_semeru11_smoke - gradleTarget: "stageMainDist :smokeTest" - gradleParameters: "-PskipFlakyTests" - stage: smoke - cacheType: smoke - parallelism: 4 - maxWorkers: 3 - testJvm: "semeru11" - - - tests: - requires: - - ok_to_test - name: test_semeru17_smoke - gradleTarget: "stageMainDist :smokeTest" - gradleParameters: "-PskipFlakyTests" - stage: smoke - cacheType: smoke - parallelism: 4 - maxWorkers: 3 - testJvm: "semeru17" - - - tests: - requires: - - ok_to_test - name: test_graalvm17_smoke - gradleTarget: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" - stage: smoke - cacheType: smoke - testJvm: "graalvm17" - - - tests: - requires: - - ok_to_test - name: z_test_8_smoke - gradleTarget: "stageMainDist :smokeTest" - gradleParameters: "-PskipFlakyTests" - stage: smoke - cacheType: smoke - parallelism: 4 - maxWorkers: 3 - testJvm: "8" - - - fan_in: - requires: - - z_test_<< 
matrix.testJvm >>_base - - z_test_<< matrix.testJvm >>_inst - - z_test_<< matrix.testJvm >>_smoke - name: test_<< matrix.testJvm >> - stage: tracing - matrix: - <<: *test_matrix - - - fan_in: - requires: - - z_test_8_base - - z_test_8_inst - - z_test_8_smoke - name: test_8 - stage: tracing - testJvm: "8" - - - agent_integration_tests: - requires: - - ok_to_test - triggeredBy: *agent_integration_tests_modules - gradleTarget: traceAgentTest - cacheType: base - testJvm: "8" - - - test_published_artifacts: - requires: - - ok_to_test - - - muzzle: - requires: - - ok_to_test - filters: - branches: - ignore: - - master - - project/* - - release/* - - - system-tests: - requires: - - ok_to_test - matrix: - <<: *system_test_matrix - - - integrations-system-tests: - requires: - - ok_to_test - - - parametric-tests: - requires: - - ok_to_test - - - fan_in: - requires: - - test_published_artifacts - - test_8_profiling - - test_oracle8_profiling - - test_zulu8_profiling - - test_zulu11_profiling - - test_11_profiling - - test_17_profiling - name: profiling - stage: profiling - - - fan_in: - requires: - - test_published_artifacts - - test_8_debugger - - test_oracle8_debugger - - test_zulu8_debugger - - test_zulu11_debugger - - test_11_debugger - - test_17_debugger - name: debugger - stage: debugger - - # This job requires all the jobs needed for a successful build, so GitHub only needs to enforce this one, - # and it will be simpler to require different JVM versions for different branches and old releases - - fan_in: - requires: - - check - - test_published_artifacts - - agent_integration_tests - - test_8 - - test_ibm8 - - test_11 - - test_semeru11_smoke - - test_17 - - test_semeru17_smoke - - test_zulu8 - - profiling - - debugger - name: required - stage: required - + CIRCLE_IS_NIGHTLY="<< pipeline.parameters.nightly >>" + CIRCLE_IS_WEEKLY="<< pipeline.parameters.weekly >>" + .circleci/render_config.py + - continuation/continue: + configuration_path: .circleci/config.continue.yml workflows: - build_test: - jobs: - *build_test_jobs - - nightly: - triggers: - - schedule: - # Run this job at 00:35 UTC every day - # The 30 minutes will allow weekly to finish before nightly is triggered on Mondays - cron: "35 0 * * *" - filters: - branches: - only: - - master - jobs: - *build_test_jobs - - weekly: - triggers: - - schedule: - # Run this job at 00:05 UTC every Monday - cron: "5 0 * * 1" - filters: - branches: - only: - - master + setup: jobs: - # This will rebuild a main caches with a new timestamp from a clean slate - - build_clean_cache: - name: build_cache_lib - gradleTarget: shadowJar - cacheType: lib - collectLibs: false - - build_clean_cache: - name: build_cache_base - gradleTarget: :baseTest - cacheType: base - - build_clean_cache: - name: build_cache_inst - gradleTarget: :instrumentationTest - cacheType: inst - - build_clean_cache: - name: build_cache_latestdep - gradleTarget: :instrumentationLatestDepTest - cacheType: latestdep - - build_clean_cache: - name: build_cache_smoke - gradleTarget: :smokeTest - cacheType: smoke - - build_clean_cache: - name: build_cache_profiling - gradleTarget: :profilingTest - cacheType: profiling + - setup diff --git a/.circleci/render_config.py b/.circleci/render_config.py new file mode 100755 index 00000000000..51cd43d413d --- /dev/null +++ b/.circleci/render_config.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python3 + +import os +import os.path +import time + +import jinja2 +import requests + +SCRIPT_DIR = os.path.dirname(__file__) + +TPL_FILENAME = "config.continue.yml.j2" 
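The new setup workflow above only installs jinja2 and requests, runs .circleci/render_config.py, and hands the rendered file to the continuation orb. A quick way to preview what the continued pipeline will contain is to run the renderer locally with the same environment variables the setup job exports. The snippet below is a hypothetical local invocation, not part of the change itself; the branch name and parameter values are examples only, and it assumes jinja2 and requests are installed locally.

    # Hypothetical local preview of the continued config. The env var names match
    # the ones exported by the setup job above; the values are examples only.
    import os
    import subprocess

    os.environ["CIRCLE_BRANCH"] = "feature/my-branch"   # example branch name
    os.environ["CIRCLE_IS_NIGHTLY"] = "false"
    os.environ["CIRCLE_IS_WEEKLY"] = "false"

    subprocess.run(["python3", ".circleci/render_config.py"], check=True)
    with open(".circleci/config.continue.yml", encoding="utf-8") as f:
        print(f.read()[:400])  # show the first few rendered lines

Without CIRCLE_PULL_REQUEST set, the script falls back to an empty label set, so only the always-on JDK matrix is rendered.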
+OUT_FILENAME = "config.continue.yml" +GENERATED_CONFIG_PATH = os.path.join(SCRIPT_DIR, OUT_FILENAME) + +# JDKs that will run on every pipeline. +ALWAYS_ON_JDKS = {"8", "11", "17"} +# And these will run only in master and release/ branches. +MASTER_ONLY_JDKS = { + "ibm8", + "oracle8", + "semeru8", + "zulu8", + "semeru11", + "zulu11", + "semeru17", + "ubuntu17", +} +# Version to use for all the base Docker images, see +# https://github.com/DataDog/dd-trace-java-docker-build/pkgs/container/dd-trace-java-docker-build +DOCKER_IMAGE_VERSION = "v23.09" + +# Get labels from pull requests to override some defaults for jobs to run. +# `run-tests: all` will run all tests. +# `run-tests: ibm8` will run the IBM 8 tests. +# `run-tests: flaky` for flaky tests jobs. +pr_url = os.environ.get("CIRCLE_PULL_REQUEST") +if pr_url: + pr_num = int(pr_url.split("/")[-1]) + headers = {} + gh_token = os.environ.get("GH_TOKEN") + if gh_token: + headers["Authorization"] = gh_token + else: + print("Missing GH_TOKEN, trying anonymously") + for _ in range(20): + try: + resp = requests.get( + f"https://api.github.com/repos/DataDog/dd-trace-java/pulls/{pr_num}", + timeout=1, + headers=headers, + ) + resp.raise_for_status() + except Exception as e: + print(f"Request failed: {e}") + time.sleep(1) + continue + data = resp.json() + break + + labels = data.get("labels", []) + labels = [l["name"] for l in labels] + labels = { + l.replace("run-tests: ", "") for l in labels if l.startswith("run-tests: ") + } +else: + labels = set() + + +branch = os.environ.get("CIRCLE_BRANCH", "") +if branch == "master" or branch.startswith("release/v") or "all" in labels: + all_jdks = ALWAYS_ON_JDKS | MASTER_ONLY_JDKS +else: + all_jdks = ALWAYS_ON_JDKS | (MASTER_ONLY_JDKS & labels) +nocov_jdks = [j for j in all_jdks if j != "8"] +# specific list for the debugger project because J9-based JVMs have issues with local vars +# so we need to test against at least one J9-based JVM +all_debugger_jdks = all_jdks | {"semeru8"} + +# Is this a nightly or weekly build? These environment variables are set in +# config.yml based on pipeline parameters.
+is_nightly = os.environ.get("CIRCLE_IS_NIGHTLY", "false") == "true" +is_weekly = os.environ.get("CIRCLE_IS_WEEKLY", "false") == "true" +is_regular = not is_nightly and not is_weekly + +vars = { + "is_nightly": is_nightly, + "is_weekly": is_weekly, + "is_regular": is_regular, + "all_jdks": all_jdks, + "all_debugger_jdks": all_debugger_jdks, + "nocov_jdks": nocov_jdks, + "flaky": branch == "master" or "flaky" in labels or "all" in labels, + "docker_image_prefix": "" if is_nightly else f"{DOCKER_IMAGE_VERSION}-", +} + +print(f"Variables for this build: {vars}") + +loader = jinja2.FileSystemLoader(searchpath=SCRIPT_DIR) +env = jinja2.Environment(loader=loader) +tpl = env.get_template(TPL_FILENAME) +out = tpl.render(**vars) + +with open(GENERATED_CONFIG_PATH, "w", encoding="utf-8") as f: + f.write(out) diff --git a/.circleci/update_pinned_system_tests.sh b/.circleci/update_pinned_system_tests.sh index 9263b234256..cb28661574c 100755 --- a/.circleci/update_pinned_system_tests.sh +++ b/.circleci/update_pinned_system_tests.sh @@ -8,7 +8,7 @@ if [[ -n $(git diff --stat) ]]; then exit 1 fi -current_commit="$(grep ^default_system_tests_commit: config.yml | sed -e 's~^.* ~~g')" +current_commit="$(grep ^default_system_tests_commit: config.continue.yml.j2 | sed -e 's~^.* ~~g')" latest_commit="$(git ls-remote git@github.com:DataDog/system-tests.git refs/heads/main | cut -f 1)" echo "Current commit: $current_commit" @@ -19,6 +19,6 @@ if [[ "$current_commit" = "$latest_commit" ]]; then fi echo "Updating config.yml" -sed -i -e "s~${current_commit?}~${latest_commit?}~g" config.yml -git diff config.yml | cat -git commit -m "Update system-tests to $latest_commit" config.yml +sed -i -e "s~${current_commit?}~${latest_commit?}~g" config.continue.yml.j2 +git diff config.continue.yml.j2 | cat +git commit -m "Update system-tests to $latest_commit" config.continue.yml.j2 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index bb7ba140484..658580cf016 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,8 +1,10 @@ # Automatically assign the team as a reviewer. 
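The heart of render_config.py shown above is the branch/label set algebra that picks the JDK matrix. The standalone sketch below condenses just that selection so it can be checked by hand when deciding which "run-tests: ..." label to put on a PR; the label sets passed in are hypothetical examples, in CI they come from the pull request via the GitHub API.

    # Condensed sketch of the JDK selection performed by render_config.py above.
    ALWAYS_ON_JDKS = {"8", "11", "17"}
    MASTER_ONLY_JDKS = {"ibm8", "oracle8", "semeru8", "zulu8",
                        "semeru11", "zulu11", "semeru17", "ubuntu17"}

    def select_jdks(branch: str, labels: set) -> set:
        # master, release/v* and the "all" label get the full matrix;
        # otherwise only the always-on JDKs plus explicitly labelled ones run.
        if branch == "master" or branch.startswith("release/v") or "all" in labels:
            return ALWAYS_ON_JDKS | MASTER_ONLY_JDKS
        return ALWAYS_ON_JDKS | (MASTER_ONLY_JDKS & labels)

    print(select_jdks("my-feature", {"ibm8"}))  # PR labelled "run-tests: ibm8"
    print(select_jdks("master", set()))         # full matrix on master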
# https://help.github.com/en/articles/about-code-owners +# Default owners, overridden by file/directory specific owners below * @DataDog/apm-java +# @DataDog/profiling-java dd-java-agent/agent-profiling/ @DataDog/profiling-java dd-java-agent/agent-crashtracking/ @DataDog/profiling-java dd-java-agent/instrumentation/exception-profiling/ @DataDog/profiling-java @@ -11,8 +13,6 @@ dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentat dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/ @DataDog/profiling-java dd-smoke-tests/profiling-integration-tests/ @DataDog/profiling-java -dd-java-agent/appsec/ @DataDog/appsec-java - # @DataDog/ci-app-libraries-java dd-java-agent/agent-ci-visibility/ @DataDog/ci-app-libraries-java dd-java-agent/instrumentation/cucumber/ @DataDog/ci-app-libraries-java @@ -32,9 +32,11 @@ dd-smoke-tests/maven/ @DataDog/ci-app-libraries-java dd-java-agent/agent-debugger/ @DataDog/debugger-java dd-smoke-tests/debugger-integration-tests/ @DataDog/debugger-java -# @DataDog/iast-java -dd-java-agent/agent-iast/ @DataDog/iast-java -dd-java-agent/instrumentation/iast-instrumenter @DataDog/iast-java -**/iast/ @DataDog/iast-java -**/Iast*.java @DataDog/iast-java -**/Iast*.groovy @DataDog/iast-java +# @DataDog/asm-java (AppSec/IAST) +dd-java-agent/agent-iast/ @DataDog/asm-java +dd-java-agent/instrumentation/*iast* @DataDog/asm-java +dd-java-agent/instrumentation/*appsec* @DataDog/asm-java +**/appsec/ @DataDog/asm-java +**/iast/ @DataDog/asm-java +**/Iast*.java @DataDog/asm-java +**/Iast*.groovy @DataDog/asm-java diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 330b3a1c54e..a952e2a36dc 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -3,3 +3,14 @@ # Motivation # Additional Notes + +Jira ticket: [PROJ-IDENT] + + diff --git a/.github/workflows/README.md b/.github/workflows/README.md index b7a80bccc5c..69460820818 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -76,7 +76,7 @@ _Action:_ _Recovery:_ Check at the milestone for the related issues and update them manually. -## Code Quality +## Code Quality and Security ### codeql-analysis [🔗](codeql-analysis.yml) @@ -84,12 +84,20 @@ _Trigger:_ When pushing commits to `master` or any pull request to `master` _Action:_ Run GitHub CodeQL action and upload result to GitHub security tab. +### trivy-analysis [🔗](trivy-analysis.yml) + +_Trigger:_ When pushing commits to `master` or any pull request to `master` + +_Action:_ Run Trivy security scanner on built artifacts and upload result to GitHub security tab. + ### gradle-wrapper-validation [🔗](gradle-wrapper-validation.yaml.disabled) **DISABLED** - GitHub provides a way to disable actions rather than changing their extensions. _Comment:_ To delete? 
+## Lib Injection + ### lib-injection [🔗](lib-injection.yaml) _Trigger:_ When pushing commits to `master`, release branches or any PR targetting `master`, and when creating tags diff --git a/.github/workflows/draft-release-notes-on-tag.yaml b/.github/workflows/draft-release-notes-on-tag.yaml index 69d753b6afa..1b2384a4723 100644 --- a/.github/workflows/draft-release-notes-on-tag.yaml +++ b/.github/workflows/draft-release-notes-on-tag.yaml @@ -117,17 +117,21 @@ jobs: } return line } + function cleanUpTitle(title) { + // Remove tags between brackets + return title.replace(/\[[^\]]+\]/g, '') + } function format(pullRequest) { - var line = `${decorate(pullRequest)}${pullRequest.title} (#${pullRequest.number}` - // Add author if community labeled + var line = `${decorate(pullRequest)}${cleanUpTitle(pullRequest.title)} (#${pullRequest.number} - @${pullRequest.user.login}` + // Add special thanks if community labeled if (pullRequest.labels.some(label => label.name == "tag: community")) { - line += ` - thanks @${pull.user.login} for the contribution!` + line += ` - thanks for the contribution!` } line += ')' return line; } - var changelog = ''; + var changelog = '' if (prByComponents.size > 0) { changelog += '# Components\n\n'; for (let pair of prByComponents) { diff --git a/.github/workflows/lib-injection.yaml b/.github/workflows/lib-injection.yaml index 01bc880f43f..079b5fc9627 100644 --- a/.github/workflows/lib-injection.yaml +++ b/.github/workflows/lib-injection.yaml @@ -33,7 +33,7 @@ jobs: - name: Set up Docker platforms id: buildx-platforms run: | - BUILDX_PLATFORMS=`docker buildx imagetools inspect --raw busybox:latest | jq -r 'reduce (.manifests[] | [ .platform.os, .platform.architecture, .platform.variant ] | join("/") | sub("\\/$"; "")) as $item (""; . + "," + $item)' | sed 's/,//'` + BUILDX_PLATFORMS=`docker buildx imagetools inspect --raw alpine:3.18.3 | jq -r 'reduce (.manifests[] | [ .platform.os, .platform.architecture, .platform.variant ] | join("/") | sub("\\/$"; "")) as $item (""; . 
+ "," + $item)' | sed 's/,//'` echo "$BUILDX_PLATFORMS" echo "platforms=$BUILDX_PLATFORMS" >> $GITHUB_OUTPUT diff --git a/.github/workflows/trivy-analysis.yml b/.github/workflows/trivy-analysis.yml new file mode 100644 index 00000000000..f7bc4d80fb2 --- /dev/null +++ b/.github/workflows/trivy-analysis.yml @@ -0,0 +1,59 @@ +name: "Trivy Security Analysis" + +on: + push: + branches: [ master ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ master ] + +permissions: + contents: read + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # 3.6.0 + + - name: Remove old artifacts + run: | + MVN_LOCAL_REPO=$(./mvnw help:evaluate -Dexpression=settings.localRepository -q -DforceStdout) + echo "MVN_LOCAL_REPO=${MVN_LOCAL_REPO}" >> "$GITHUB_ENV" + rm -rf "${MVN_LOCAL_REPO}/com/datadoghq" + + - name: Build and publish artifacts locally + run: | + GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G'" JAVA_HOME=$JAVA_HOME_8_X64 JAVA_8_HOME=$JAVA_HOME_8_X64 JAVA_11_HOME=$JAVA_HOME_11_X64 JAVA_17_HOME=$JAVA_HOME_17_X64 ./gradlew clean publishToMavenLocal --build-cache --parallel --stacktrace --no-daemon --max-workers=4 + + - name: Copy published artifacts + run: | + mkdir -p ./workspace/.trivy + cp -RP "${MVN_LOCAL_REPO}/com/datadoghq" ./workspace/.trivy/ + + - name: List copied artifacts + run: | + ls -laR "./workspace/.trivy" + + - name: Run Trivy security scanner + uses: aquasecurity/trivy-action@41f05d9ecffa2ed3f1580af306000f734b733e54 # v0.11.2 + with: + scan-type: rootfs + scan-ref: './workspace/.trivy/' + format: 'sarif' + output: 'trivy-results.sarif' + severity: 'CRITICAL,HIGH' + limit-severities-for-sarif: true + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@00e563ead9f72a8461b24876bee2d0c2e8bd2ee8 # 2.15 + if: always() + with: + sarif_file: 'trivy-results.sarif' diff --git a/.gitignore b/.gitignore index 2e975650fca..fcedfdbdb4d 100644 --- a/.gitignore +++ b/.gitignore @@ -68,6 +68,7 @@ replay_pid* ############ _circle_ci_cache_* upstream.env +/.circleci/config.continue.yml # Benchmarks # benchmark/reports diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9e9b60a0af8..3be67d8c675 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -57,10 +57,13 @@ build: &build script: - GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx1900M -Xms512M' -Ddatadog.forkedMaxHeapSize=512M -Ddatadog.forkedMinHeapSize=128M" ./gradlew clean :dd-java-agent:shadowJar --build-cache --parallel --stacktrace --no-daemon --max-workers=8 - echo UPSTREAM_TRACER_VERSION=$(java -jar workspace/dd-java-agent/build/libs/*.jar) >> upstream.env + - echo "BUILD_JOB_NAME=$CI_JOB_NAME" >> build.env artifacts: paths: - 'workspace/dd-java-agent/build/libs/*.jar' - 'upstream.env' + reports: + dotenv: build.env build_with_cache: <<: *build @@ -75,7 +78,7 @@ build_with_cache: package: extends: .package - needs: [ build ] + when: on_success # this can't use 'needs: [build]', since build is not available in the scheduled pipeline script: - ../.gitlab/build_java_package.sh @@ -95,7 +98,7 @@ deploy_to_reliability_env: project: DataDog/apm-reliability/datadog-reliability-env branch: $DOWNSTREAM_BRANCH variables: - UPSTREAM_PACKAGE_JOB: build + UPSTREAM_PACKAGE_JOB: $BUILD_JOB_NAME UPSTREAM_PROJECT_ID: $CI_PROJECT_ID UPSTREAM_PROJECT_NAME: $CI_PROJECT_NAME UPSTREAM_PIPELINE_ID: 
$CI_PIPELINE_ID diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 82ea9628ce6..7c903bdffab 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,6 +3,8 @@ Pull requests for bug fixes are welcome, but before submitting new features or changes to current functionality [open an issue](https://github.com/DataDog/dd-trace-java/issues/new) and discuss your ideas or propose the changes you wish to make. After a resolution is reached a PR can be submitted for review. +When opening a pull request, please open it as a [draft](https://github.blog/2019-02-14-introducing-draft-pull-requests/) to not auto assign reviewers before you feel the pull request is in a reviewable state. + ## Requirements To build the full project: @@ -198,8 +200,6 @@ Suggested plugins and settings: * With java use the following import layout (groovy should still use the default) to ensure consistency with google-java-format: ![import layout](https://user-images.githubusercontent.com/734411/43430811-28442636-94ae-11e8-86f1-f270ddcba023.png) * [Google Java Format](https://plugins.jetbrains.com/plugin/8527-google-java-format) -* [Save Actions](https://plugins.jetbrains.com/plugin/7642-save-actions) - ![Recommended Settings](https://user-images.githubusercontent.com/35850765/124003079-838f4280-d9a4-11eb-9250-5c517631e362.png) ## Troubleshooting diff --git a/benchmark/load/insecure-bank/benchmark.json b/benchmark/load/insecure-bank/benchmark.json index 80a8a9b5639..422a0cda7d9 100644 --- a/benchmark/load/insecure-bank/benchmark.json +++ b/benchmark/load/insecure-bank/benchmark.json @@ -18,18 +18,6 @@ "JAVA_OPTS": "-javaagent:${TRACER}" } }, - "profiling": { - "env": { - "VARIANT": "profiling", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.profiling.enabled=true" - } - }, - "appsec": { - "env": { - "VARIANT": "appsec", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.appsec.enabled=true" - } - }, "iast": { "env": { "VARIANT": "iast", @@ -47,6 +35,12 @@ "VARIANT": "iast_INACTIVE", "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=inactive" } + }, + "iast_TELEMETRY_OFF": { + "env": { + "VARIANT": "iast_TELEMETRY_OFF", + "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.telemetry.verbosity=OFF" + } } } } diff --git a/benchmark/startup/insecure-bank/benchmark.json b/benchmark/startup/insecure-bank/benchmark.json index b4f40c5c3ac..e3d325b5d06 100644 --- a/benchmark/startup/insecure-bank/benchmark.json +++ b/benchmark/startup/insecure-bank/benchmark.json @@ -12,23 +12,17 @@ "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.benchmark.enabled=true -Ddd.benchmark.output.dir=${OUTPUT_DIR}/tracing" } }, - "profiling": { - "env": { - "VARIANT": "profiling", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.benchmark.enabled=true -Ddd.benchmark.output.dir=${OUTPUT_DIR}/profiling -Ddd.profiling.enabled=true" - } - }, - "appsec": { - "env": { - "VARIANT": "appsec", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.benchmark.enabled=true -Ddd.benchmark.output.dir=${OUTPUT_DIR}/appsec -Ddd.appsec.enabled=true" - } - }, "iast": { "env": { "VARIANT": "iast", "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.benchmark.enabled=true -Ddd.benchmark.output.dir=${OUTPUT_DIR}/iast -Ddd.iast.enabled=true" } + }, + "iast_TELEMETRY_OFF": { + "env": { + "VARIANT": "iast_TELEMETRY_OFF", + "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.benchmark.enabled=true -Ddd.benchmark.output.dir=${OUTPUT_DIR}/iast_TELEMETRY_OFF -Ddd.iast.enabled=true -Ddd.iast.telemetry.verbosity=OFF" + } } } } diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts index 8fc22d7e9d5..f1178c340ab 100644 --- 
a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -31,17 +31,17 @@ dependencies { implementation(gradleApi()) implementation(localGroovy()) - implementation("net.bytebuddy", "byte-buddy-gradle-plugin", "1.14.5") + implementation("net.bytebuddy", "byte-buddy-gradle-plugin", "1.14.8") implementation("org.eclipse.aether", "aether-connector-basic", "1.1.0") implementation("org.eclipse.aether", "aether-transport-http", "1.1.0") implementation("org.apache.maven", "maven-aether-provider", "3.3.9") implementation("com.google.guava", "guava", "20.0") - implementation("org.ow2.asm", "asm", "9.5") - implementation("org.ow2.asm", "asm-tree", "9.5") + implementation("org.ow2.asm", "asm", "9.6") + implementation("org.ow2.asm", "asm-tree", "9.6") - testImplementation("org.spockframework", "spock-core", "2.0-groovy-3.0") + testImplementation("org.spockframework", "spock-core", "2.2-groovy-3.0") testImplementation("org.codehaus.groovy", "groovy-all", "3.0.17") } diff --git a/buildSrc/call-site-instrumentation-plugin/src/main/java/datadog/trace/plugin/csi/impl/ext/IastExtension.java b/buildSrc/call-site-instrumentation-plugin/src/main/java/datadog/trace/plugin/csi/impl/ext/IastExtension.java index 8d6d1175fca..582f4a535b3 100644 --- a/buildSrc/call-site-instrumentation-plugin/src/main/java/datadog/trace/plugin/csi/impl/ext/IastExtension.java +++ b/buildSrc/call-site-instrumentation-plugin/src/main/java/datadog/trace/plugin/csi/impl/ext/IastExtension.java @@ -16,8 +16,10 @@ import com.github.javaparser.ast.expr.AnnotationExpr; import com.github.javaparser.ast.expr.AssignExpr; import com.github.javaparser.ast.expr.BooleanLiteralExpr; +import com.github.javaparser.ast.expr.CastExpr; import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.FieldAccessExpr; +import com.github.javaparser.ast.expr.IntegerLiteralExpr; import com.github.javaparser.ast.expr.LambdaExpr; import com.github.javaparser.ast.expr.MemberValuePair; import com.github.javaparser.ast.expr.MethodCallExpr; @@ -172,7 +174,7 @@ private void addTelemetryToAdvice( final TypeResolver resolver, final LambdaExpr adviceLambda, final AdviceMetadata metaData) { final BlockStmt lambdaBody = adviceLambda.getBody().asBlockStmt(); final String metric = getMetricName(metaData); - final String tagValue = getMetricTagValue(resolver, metaData); + final Byte tagValue = getMetricTagValue(resolver, metaData); final String instrumentedMetric = "INSTRUMENTED_" + metric; final IfStmt instrumentedStatement = new IfStmt() @@ -197,7 +199,7 @@ private static Expression isEnabledCondition(final String metric) { } private static MethodCallExpr addTelemetryCollectorMethod( - final String metric, final String tagValue) { + final String metric, final Byte tagValue) { final MethodCallExpr method = new MethodCallExpr() .setScope(new NameExpr(IAST_METRIC_COLLECTOR_CLASS)) @@ -205,14 +207,16 @@ private static MethodCallExpr addTelemetryCollectorMethod( .addArgument( new FieldAccessExpr().setScope(new NameExpr(IAST_METRIC_CLASS)).setName(metric)); if (tagValue != null) { - method.addArgument(new StringLiteralExpr(tagValue)); + method.addArgument( + new CastExpr() + .setExpression(new IntegerLiteralExpr(Byte.toString(tagValue))) + .setType(byte.class)); } method.addArgument(intLiteral(1)); return method; } - private static BlockStmt addTelemetryCollectorByteCode( - final String metric, final String tagValue) { + private static BlockStmt addTelemetryCollectorByteCode(final String metric, final Byte tagValue) { final BlockStmt stmt = new 
BlockStmt(); // this code generates the java source code needed to provide the bytecode for the statement // IastTelemetryCollector.add(${metric}, 1); or IastTelemetryCollector.add(${metric}, ${tag}, @@ -227,11 +231,7 @@ private static BlockStmt addTelemetryCollectorByteCode( .addArgument(new StringLiteralExpr(metric)) .addArgument(new StringLiteralExpr("L" + IAST_METRIC_INTERNAL_NAME + ";"))); if (tagValue != null) { - stmt.addStatement( - new MethodCallExpr() - .setScope(new NameExpr("handler")) - .setName("loadConstant") - .addArgument(new StringLiteralExpr(tagValue))); + stmt.addStatement(pushByteExpression(tagValue)); } stmt.addStatement( new MethodCallExpr() @@ -241,7 +241,7 @@ private static BlockStmt addTelemetryCollectorByteCode( new FieldAccessExpr().setScope(new NameExpr(OPCODES_FQDN)).setName("ICONST_1"))); final String descriptor = tagValue != null - ? "(L" + IAST_METRIC_INTERNAL_NAME + ";Ljava/lang/String;I)V" + ? "(L" + IAST_METRIC_INTERNAL_NAME + ";BI)V" : "(L" + IAST_METRIC_INTERNAL_NAME + ";I)V"; stmt.addStatement( new MethodCallExpr() @@ -261,26 +261,26 @@ private static String getMetricName(final AdviceMetadata metaData) { return kind.getName().getId().toUpperCase(); } - private static String getMetricTagValue( + private static Byte getMetricTagValue( final TypeResolver resolver, final AdviceMetadata metadata) { if (metadata.getTag() == null) { return null; } final Expression tag = metadata.getTag(); - if (tag.isStringLiteralExpr()) { - return tag.asStringLiteralExpr().getValue(); + if (tag.isIntegerLiteralExpr()) { + return tag.asIntegerLiteralExpr().asNumber().byteValue(); } else { return getFieldValue(resolver, tag.asFieldAccessExpr()); } } - private static String getFieldValue(final TypeResolver resolver, final FieldAccessExpr tag) { + private static Byte getFieldValue(final TypeResolver resolver, final FieldAccessExpr tag) { final FieldAccessExpr fieldAccessExpr = tag.asFieldAccessExpr(); final ResolvedFieldDeclaration value = fieldAccessExpr.resolve().asField(); try { final Field field = getField(value); field.setAccessible(true); - return (String) field.get(field.getDeclaringClass()); + return (Byte) field.get(field.getDeclaringClass()); } catch (Exception e) { throw new RuntimeException(e); } @@ -420,6 +420,31 @@ private static Expression getAnnotationExpression(final AnnotationExpr expr) { } } + public static Expression pushByteExpression(final byte value) { + final FieldAccessExpr opCodes = new FieldAccessExpr().setScope(new NameExpr(OPCODES_FQDN)); + final MethodCallExpr result = + new MethodCallExpr().setScope(new NameExpr("handler")).setName("instruction"); + switch (value) { + case -1: + result.addArgument(opCodes.setName("ICONST_M1")); + break; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + result.addArgument(opCodes.setName("ICONST_" + value)); + break; + default: + result + .addArgument(opCodes.setName("BIPUSH")) + .addArgument(new IntegerLiteralExpr(Integer.toString(value))); + break; + } + return result; + } + private static class AdviceMetadata { private final AnnotationExpr kind; private final Expression tag; diff --git a/buildSrc/call-site-instrumentation-plugin/src/test/groovy/datadog/trace/plugin/csi/impl/ext/IastExtensionTest.groovy b/buildSrc/call-site-instrumentation-plugin/src/test/groovy/datadog/trace/plugin/csi/impl/ext/IastExtensionTest.groovy index ad53446e866..e9789179140 100644 --- a/buildSrc/call-site-instrumentation-plugin/src/test/groovy/datadog/trace/plugin/csi/impl/ext/IastExtensionTest.groovy +++ 
b/buildSrc/call-site-instrumentation-plugin/src/test/groovy/datadog/trace/plugin/csi/impl/ext/IastExtensionTest.groovy @@ -13,6 +13,7 @@ import datadog.trace.plugin.csi.impl.assertion.AdviceAssert import datadog.trace.plugin.csi.impl.assertion.AssertBuilder import datadog.trace.plugin.csi.impl.assertion.CallSiteAssert import datadog.trace.plugin.csi.impl.ext.tests.IastExtensionCallSite +import datadog.trace.plugin.csi.impl.ext.tests.SourceTypes import groovy.transform.CompileDynamic import spock.lang.TempDir @@ -85,18 +86,32 @@ class IastExtensionTest extends BaseCsiPluginTest { advices(0) { pointcut('javax/servlet/http/HttpServletRequest', 'getHeader', '(Ljava/lang/String;)Ljava/lang/String;') instrumentedMetric('IastMetric.INSTRUMENTED_SOURCE') { - metricStatements('IastMetricCollector.add(IastMetric.INSTRUMENTED_SOURCE, "http.request.header.name", 1);') + metricStatements('IastMetricCollector.add(IastMetric.INSTRUMENTED_SOURCE, (byte) 3, 1);') } executedMetric('IastMetric.EXECUTED_SOURCE') { metricStatements( 'handler.field(net.bytebuddy.jar.asm.Opcodes.GETSTATIC, "datadog/trace/api/iast/telemetry/IastMetric", "EXECUTED_SOURCE", "Ldatadog/trace/api/iast/telemetry/IastMetric;");', - 'handler.loadConstant("http.request.header.name");', + 'handler.instruction(net.bytebuddy.jar.asm.Opcodes.ICONST_3);', 'handler.instruction(net.bytebuddy.jar.asm.Opcodes.ICONST_1);', - 'handler.method(net.bytebuddy.jar.asm.Opcodes.INVOKESTATIC, "datadog/trace/api/iast/telemetry/IastMetricCollector", "add", "(Ldatadog/trace/api/iast/telemetry/IastMetric;Ljava/lang/String;I)V", false);' + 'handler.method(net.bytebuddy.jar.asm.Opcodes.INVOKESTATIC, "datadog/trace/api/iast/telemetry/IastMetricCollector", "add", "(Ldatadog/trace/api/iast/telemetry/IastMetric;BI)V", false);' ) } } advices(1) { + pointcut('javax/servlet/http/HttpServletRequest', 'getInputStream', '()Ljavax/servlet/ServletInputStream;') + instrumentedMetric('IastMetric.INSTRUMENTED_SOURCE') { + metricStatements('IastMetricCollector.add(IastMetric.INSTRUMENTED_SOURCE, (byte) 127, 1);') + } + executedMetric('IastMetric.EXECUTED_SOURCE') { + metricStatements( + 'handler.field(net.bytebuddy.jar.asm.Opcodes.GETSTATIC, "datadog/trace/api/iast/telemetry/IastMetric", "EXECUTED_SOURCE", "Ldatadog/trace/api/iast/telemetry/IastMetric;");', + 'handler.instruction(net.bytebuddy.jar.asm.Opcodes.BIPUSH, 127);', + 'handler.instruction(net.bytebuddy.jar.asm.Opcodes.ICONST_1);', + 'handler.method(net.bytebuddy.jar.asm.Opcodes.INVOKESTATIC, "datadog/trace/api/iast/telemetry/IastMetricCollector", "add", "(Ldatadog/trace/api/iast/telemetry/IastMetric;BI)V", false);' + ) + } + } + advices(2) { pointcut('javax/servlet/ServletRequest', 'getReader', '()Ljava/io/BufferedReader;') instrumentedMetric('IastMetric.INSTRUMENTED_PROPAGATION') { metricStatements('IastMetricCollector.add(IastMetric.INSTRUMENTED_PROPAGATION, 1);') diff --git a/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/IastExtensionCallSite.java b/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/IastExtensionCallSite.java index 6c3c133a7c1..34bc4a5be9d 100644 --- a/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/IastExtensionCallSite.java +++ b/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/IastExtensionCallSite.java @@ -2,13 +2,14 @@ import datadog.trace.agent.tooling.csi.CallSite; import java.io.BufferedReader; +import 
javax.servlet.ServletInputStream; import javax.servlet.ServletRequest; import javax.servlet.http.HttpServletRequest; @CallSite(spi = IastCallSites.class) public class IastExtensionCallSite { - @Source(SourceTypes.REQUEST_HEADER_NAME_STRING) + @Source(SourceTypes.REQUEST_HEADER_NAME) @CallSite.After( "java.lang.String javax.servlet.http.HttpServletRequest.getHeader(java.lang.String)") public static String afterGetHeader( @@ -18,6 +19,15 @@ public static String afterGetHeader( return headerValue; } + @Source(SourceTypes.REQUEST_BODY) + @CallSite.After( + "javax.servlet.ServletInputStream javax.servlet.http.HttpServletRequest.getInputStream()") + public static ServletInputStream afterGetInputStream( + @CallSite.This final HttpServletRequest self, + @CallSite.Return final ServletInputStream stream) { + return stream; + } + @Propagation @CallSite.After("java.io.BufferedReader javax.servlet.ServletRequest.getReader()") public static BufferedReader afterGetReader( diff --git a/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/Source.java b/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/Source.java index 9a9a977ae30..5cb5503fd9a 100644 --- a/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/Source.java +++ b/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/Source.java @@ -9,5 +9,5 @@ @Retention(RetentionPolicy.RUNTIME) public @interface Source { /** Source type */ - String value(); + byte value(); } diff --git a/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/SourceTypes.java b/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/SourceTypes.java index 0f5fae0cb85..e5e2d68f773 100644 --- a/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/SourceTypes.java +++ b/buildSrc/call-site-instrumentation-plugin/src/test/java/datadog/trace/plugin/csi/impl/ext/tests/SourceTypes.java @@ -2,5 +2,7 @@ public class SourceTypes { - public static final String REQUEST_HEADER_NAME_STRING = "http.request.header.name"; + public static final byte REQUEST_HEADER_NAME = 3; + + public static final byte REQUEST_BODY = 127; } diff --git a/buildSrc/src/main/groovy/InstrumentPlugin.groovy b/buildSrc/src/main/groovy/InstrumentPlugin.groovy index d19514e6543..34e41e48faa 100644 --- a/buildSrc/src/main/groovy/InstrumentPlugin.groovy +++ b/buildSrc/src/main/groovy/InstrumentPlugin.groovy @@ -116,7 +116,9 @@ abstract class InstrumentTask extends DefaultTask { parameters.buildStartedTime.set(invocationDetails.buildStartedTime) parameters.pluginClassPath.setFrom(project.configurations.findByName('instrumentPluginClasspath') ?: []) parameters.plugins.set(extension.plugins) - parameters.instrumentingClassPath.setFrom(project.configurations.compileClasspath.findAll { + def matcher = instrumentTask.name =~ /instrument([A-Z].+)Java/ + def cfgName = matcher.matches() ? 
"${matcher.group(1).uncapitalize()}CompileClasspath" : 'compileClasspath' + parameters.instrumentingClassPath.setFrom(project.configurations[cfgName].findAll { it.name != 'previous-compilation-data.bin' && !it.name.endsWith(".gz") } + sourceDirectory + (extension.additionalClasspath[instrumentTask.name] ?: [])*.get()) parameters.sourceDirectory.set(sourceDirectory.asFile) diff --git a/buildSrc/src/main/groovy/MuzzlePlugin.groovy b/buildSrc/src/main/groovy/MuzzlePlugin.groovy index ebbc701a669..51f92feaa4f 100644 --- a/buildSrc/src/main/groovy/MuzzlePlugin.groovy +++ b/buildSrc/src/main/groovy/MuzzlePlugin.groovy @@ -1,3 +1,5 @@ +import static MuzzleAction.createClassLoader + import org.apache.maven.repository.internal.MavenRepositorySystemUtils import org.eclipse.aether.DefaultRepositorySystemSession import org.eclipse.aether.RepositorySystem @@ -13,6 +15,7 @@ import org.eclipse.aether.resolution.VersionRangeResult import org.eclipse.aether.spi.connector.RepositoryConnectorFactory import org.eclipse.aether.spi.connector.transport.TransporterFactory import org.eclipse.aether.transport.http.HttpTransporterFactory +import org.eclipse.aether.util.version.GenericVersionScheme import org.eclipse.aether.version.Version import org.gradle.api.Action import org.gradle.api.DefaultTask @@ -34,6 +37,7 @@ import org.gradle.workers.WorkParameters import org.gradle.workers.WorkerExecutor import java.lang.reflect.Method +import java.util.function.BiFunction import java.util.regex.Pattern /** @@ -58,6 +62,26 @@ class MuzzlePlugin implements Plugin { MUZZLE_REPOS = Collections.unmodifiableList(Arrays.asList(central, restlet, typesafe)) } + static class TestedArtifact { + final String instrumentation + final String group + final String module + final Version lowVersion + final Version highVersion + + TestedArtifact(String instrumentation, String group, String module, Version lowVersion, Version highVersion) { + this.instrumentation = instrumentation + this.group = group + this.module = module + this.lowVersion = lowVersion + this.highVersion = highVersion + } + + String key() { + "$instrumentation:$group:$module" + } + } + @Override void apply(Project project) { def childProjects = project.rootProject.getChildProjects().get('dd-java-agent').getChildProjects() @@ -105,13 +129,28 @@ class MuzzlePlugin implements Plugin { dependsOn compileMuzzle } - project.task(['type': MuzzleTask],'printReferences') { + project.task(['type': MuzzleTask], 'printReferences') { description = "Print references created by instrumentation muzzle" doLast { printMuzzle(project) } dependsOn compileMuzzle } + project.task(['type': MuzzleTask], 'generateMuzzleReport') { + description = "Print instrumentation version report" + doLast { + dumpVersionRanges(project) + } + dependsOn compileMuzzle + } + + + project.task(['type': MuzzleTask], 'mergeMuzzleReports') { + description = "Merge generated version reports in one unique csv" + doLast { + mergeReports(project) + } + } def hasRelevantTask = project.gradle.startParameter.taskNames.any { taskName -> // removing leading ':' if present @@ -129,25 +168,24 @@ class MuzzlePlugin implements Plugin { final RepositorySystem system = newRepositorySystem() final RepositorySystemSession session = newRepositorySystemSession(system) - project.afterEvaluate { // use runAfter to set up task finalizers in version order Task runAfter = project.tasks.muzzle // runLast is the last task to finish, so we can time the execution Task runLast = runAfter - for (MuzzleDirective muzzleDirective : 
project.muzzle.directives) { project.getLogger().info("configured $muzzleDirective") if (muzzleDirective.coreJdk) { runLast = runAfter = addMuzzleTask(muzzleDirective, null, project, runAfter, muzzleBootstrap, muzzleTooling) } else { - runLast = muzzleDirectiveToArtifacts(muzzleDirective, system, session).inject(runLast) { last, Artifact singleVersion -> + def range = resolveVersionRange(muzzleDirective, system, session) + runLast = muzzleDirectiveToArtifacts(muzzleDirective, range).inject(runLast) { last, Artifact singleVersion -> runAfter = addMuzzleTask(muzzleDirective, singleVersion, project, runAfter, muzzleBootstrap, muzzleTooling) } if (muzzleDirective.assertInverse) { runLast = inverseOf(muzzleDirective, system, session).inject(runLast) { last1, MuzzleDirective inverseDirective -> - muzzleDirectiveToArtifacts(inverseDirective, system, session).inject(last1) { last2, Artifact singleVersion -> + muzzleDirectiveToArtifacts(inverseDirective, resolveVersionRange(inverseDirective, system, session)).inject(last1) { last2, Artifact singleVersion -> runAfter = addMuzzleTask(inverseDirective, singleVersion, project, runAfter, muzzleBootstrap, muzzleTooling) } } @@ -164,6 +202,85 @@ class MuzzlePlugin implements Plugin { } } + static Version highest(Version a, Version b) { + (a <=> b) > 0 ? a : b + } + + static Version lowest(Version a, Version b) { + (a <=> b) < 0 ? a : b + } + + static Map resolveInstrumentationAndJarVersions(MuzzleDirective directive, ClassLoader cl, + Version lowVersion, Version highVersion) { + + Method listMethod = cl.loadClass('datadog.trace.agent.tooling.muzzle.MuzzleVersionScanPlugin') + .getMethod('listInstrumentationNames', ClassLoader.class, String.class) + + Set names = (Set) listMethod.invoke(null, cl, directive.getName()) + Map ret = [:] + for (String n : names) { + def testedArtifact = new TestedArtifact(n, directive.group, directive.module, lowVersion, highVersion) + def value = ret.get(testedArtifact.key(), testedArtifact) + ret.put(testedArtifact.key(), new TestedArtifact(value.instrumentation, value.group, value.module, lowest(lowVersion, value.lowVersion), + highest(highVersion, value.highVersion))) + } + return ret + } + + private static void mergeReports(Project project) { + def dir = project.file("${project.rootProject.buildDir}/muzzle-deps-results") + Map map = new TreeMap<>() + def versionScheme = new GenericVersionScheme() + dir.eachFileMatch(~/.*\.csv/) { file -> + file.eachLine { line, nb -> + if (nb == 1) { + // skip header + return + } + def split = line.split(",") + def parsed = new TestedArtifact(split[0], split[1], split[2], versionScheme.parseVersion(split[3]), + versionScheme.parseVersion(split[4])) + map.merge(parsed.key(), parsed, [ + apply: { TestedArtifact x, TestedArtifact y -> + return new TestedArtifact(x.instrumentation, x.group, x.module, lowest(x.lowVersion, y.lowVersion), highest(x.highVersion, y.highVersion)) + } + ] as BiFunction) + } + } + dumpVersionsToCsv(project, map) + } + + + private static void dumpVersionRanges(Project project) { + final RepositorySystem system = newRepositorySystem() + final RepositorySystemSession session = newRepositorySystemSession(system) + def versions = new TreeMap() + project.muzzle.directives.findAll { !((MuzzleDirective) it).isCoreJdk() }.each { + def range = resolveVersionRange(it as MuzzleDirective, system, session) + def cp = project.sourceSets.main.runtimeClasspath + def cl = new URLClassLoader(cp*.toURI()*.toURL() as URL[], null as ClassLoader) + def partials = 
resolveInstrumentationAndJarVersions(it as MuzzleDirective, cl, + range.lowestVersion, range.highestVersion) + partials.each { + versions.merge(it.getKey(), it.getValue(), [ + apply: { TestedArtifact x, TestedArtifact y -> + return new TestedArtifact(x.instrumentation, x.group, x.module, lowest(x.lowVersion, y.lowVersion), highest(x.highVersion, y.highVersion)) + } + ] as BiFunction) + } + } + dumpVersionsToCsv(project, versions) + } + + private static void dumpVersionsToCsv(Project project, SortedMap versions) { + def filename = project.path.replaceFirst('^:', '').replace(':', '_') + def dir = project.file("${project.rootProject.buildDir}/muzzle-deps-results") + dir.mkdirs() + def file = project.file("${dir}/${filename}.csv") + file.write "instrumentation,jarGroupId,jarArtifactId,lowestVersion,highestVersion\n" + file << versions.values().collect { [it.instrumentation, it.group, it.module, it.lowVersion.toString(), it.highVersion.toString()].join(",") }.join("\n") + } + private static void generateResultsXML(Project project, long millis) { def seconds = (millis * 1.0) / 1000 def name = "${project.path}:muzzle" @@ -206,27 +323,28 @@ class MuzzlePlugin implements Plugin { return cp } - /** - * Convert a muzzle directive to a list of artifacts - */ - private static Set muzzleDirectiveToArtifacts(MuzzleDirective muzzleDirective, RepositorySystem system, RepositorySystemSession session) { - final Artifact directiveArtifact = new DefaultArtifact(muzzleDirective.group, muzzleDirective.module, "jar", muzzleDirective.versions) + static VersionRangeResult resolveVersionRange(MuzzleDirective muzzleDirective, RepositorySystem system, RepositorySystemSession session) { + final Artifact directiveArtifact = new DefaultArtifact(muzzleDirective.group, muzzleDirective.module, muzzleDirective.classifier ?: "", "jar", muzzleDirective.versions) final VersionRangeRequest rangeRequest = new VersionRangeRequest() rangeRequest.setRepositories(muzzleDirective.getRepositories(MUZZLE_REPOS)) rangeRequest.setArtifact(directiveArtifact) - final VersionRangeResult rangeResult = system.resolveVersionRange(session, rangeRequest) - final Set versions = filterAndLimitVersions(rangeResult, muzzleDirective.skipVersions) + return system.resolveVersionRange(session, rangeRequest) + } + + /** + * Convert a muzzle directive to a list of artifacts + */ + private static Set muzzleDirectiveToArtifacts(MuzzleDirective muzzleDirective, VersionRangeResult rangeResult) { -// println "Range Request: " + rangeRequest -// println "Range Result: " + rangeResult + final Set versions = filterAndLimitVersions(rangeResult, muzzleDirective.skipVersions) final Set allVersionArtifacts = versions.collect { version -> new DefaultArtifact(muzzleDirective.group, muzzleDirective.module, muzzleDirective.classifier ?: "", "jar", version.toString()) }.toSet() if (allVersionArtifacts.isEmpty()) { - throw new GradleException("No muzzle artifacts found for $muzzleDirective.group:$muzzleDirective.module $muzzleDirective.versions") + throw new GradleException("No muzzle artifacts found for $muzzleDirective.group:$muzzleDirective.module $muzzleDirective.versions $muzzleDirective.classifier") } return allVersionArtifacts @@ -317,7 +435,11 @@ class MuzzlePlugin implements Plugin { def config = instrumentationProject.configurations.create(taskName) if (!muzzleDirective.coreJdk) { - def dep = instrumentationProject.dependencies.create("$versionArtifact.groupId:$versionArtifact.artifactId:$versionArtifact.version") { + def depId = 
"$versionArtifact.groupId:$versionArtifact.artifactId:$versionArtifact.version" + if (versionArtifact.classifier) { + depId += ":" + versionArtifact.classifier + } + def dep = instrumentationProject.dependencies.create(depId) { transitive = true } // The following optional transitive dependencies are brought in by some legacy module such as log4j 1.x but are no @@ -575,8 +697,7 @@ abstract class MuzzleTask extends DefaultTask { void assertMuzzle(Configuration muzzleBootstrap, Configuration muzzleTooling, Project instrumentationProject, - MuzzleDirective muzzleDirective = null) - { + MuzzleDirective muzzleDirective = null) { def workQueue String javaVersion = muzzleDirective?.javaVersion if (javaVersion) { @@ -617,11 +738,17 @@ abstract class MuzzleTask extends DefaultTask { interface MuzzleWorkParameters extends WorkParameters { Property getBuildStartedTime() + ConfigurableFileCollection getBootstrapClassPath() + ConfigurableFileCollection getToolingClassPath() + ConfigurableFileCollection getInstrumentationClassPath() + ConfigurableFileCollection getTestApplicationClassPath() + Property getAssertPass() + Property getMuzzleDirective() } diff --git a/buildSrc/src/test/groovy/InstrumentPluginTest.groovy b/buildSrc/src/test/groovy/InstrumentPluginTest.groovy index 668eefb77e6..a1a3ec6b1eb 100644 --- a/buildSrc/src/test/groovy/InstrumentPluginTest.groovy +++ b/buildSrc/src/test/groovy/InstrumentPluginTest.groovy @@ -23,7 +23,7 @@ class InstrumentPluginTest extends Specification { } dependencies { - compileOnly group: 'net.bytebuddy', name: 'byte-buddy', version: '1.14.5' // just to build TestPlugin + compileOnly group: 'net.bytebuddy', name: 'byte-buddy', version: '1.14.8' // just to build TestPlugin } apply plugin: 'instrument' diff --git a/communication/build.gradle b/communication/build.gradle index 5d3cf618546..9c4ce6011f3 100644 --- a/communication/build.gradle +++ b/communication/build.gradle @@ -9,6 +9,7 @@ dependencies { implementation project(':utils:socket-utils') implementation project(':utils:version-utils') + api deps.okio api deps.okhttp api group: 'com.squareup.moshi', name: 'moshi', version: versions.moshi implementation group: 'com.datadoghq', name: 'java-dogstatsd-client', version: "${versions.dogstatsd}" diff --git a/communication/src/main/java/datadog/communication/ddagent/DDAgentFeaturesDiscovery.java b/communication/src/main/java/datadog/communication/ddagent/DDAgentFeaturesDiscovery.java index 6c00b734bef..ae3c375875e 100644 --- a/communication/src/main/java/datadog/communication/ddagent/DDAgentFeaturesDiscovery.java +++ b/communication/src/main/java/datadog/communication/ddagent/DDAgentFeaturesDiscovery.java @@ -47,6 +47,8 @@ public class DDAgentFeaturesDiscovery implements DroppingPolicy { public static final String DEBUGGER_ENDPOINT = "debugger/v1/input"; + public static final String TELEMETRY_PROXY_ENDPOINT = "telemetry/proxy/"; + private static final long MIN_FEATURE_DISCOVERY_INTERVAL_MILLIS = 60 * 1000; private final OkHttpClient client; @@ -58,6 +60,7 @@ public class DDAgentFeaturesDiscovery implements DroppingPolicy { private final boolean metricsEnabled; private final String[] dataStreamsEndpoints = {V01_DATASTREAMS_ENDPOINT}; private final String[] evpProxyEndpoints = {V2_EVP_PROXY_ENDPOINT}; + private final String[] telemetryProxyEndpoints = {TELEMETRY_PROXY_ENDPOINT}; private volatile String traceEndpoint; private volatile String metricsEndpoint; @@ -69,6 +72,7 @@ public class DDAgentFeaturesDiscovery implements DroppingPolicy { private volatile String 
debuggerEndpoint; private volatile String evpProxyEndpoint; private volatile String version; + private volatile String telemetryProxyEndpoint; private long lastTimeDiscovered; @@ -100,6 +104,7 @@ private void reset() { evpProxyEndpoint = null; version = null; lastTimeDiscovered = 0; + telemetryProxyEndpoint = null; } /** Run feature discovery, unconditionally. */ @@ -162,14 +167,15 @@ private void doDiscovery() { if (log.isDebugEnabled()) { log.debug( - "discovered traceEndpoint={}, metricsEndpoint={}, supportsDropping={}, supportsLongRunning={}, dataStreamsEndpoint={}, configEndpoint={}, evpProxyEndpoint={}", + "discovered traceEndpoint={}, metricsEndpoint={}, supportsDropping={}, supportsLongRunning={}, dataStreamsEndpoint={}, configEndpoint={}, evpProxyEndpoint={}, telemetryProxyEndpoint={}", traceEndpoint, metricsEndpoint, supportsDropping, supportsLongRunning, dataStreamsEndpoint, + configEndpoint, evpProxyEndpoint, - configEndpoint); + telemetryProxyEndpoint); } } @@ -247,6 +253,13 @@ private boolean processInfoResponse(String response) { } } + for (String endpoint : telemetryProxyEndpoints) { + if (endpoints.contains(endpoint) || endpoints.contains("/" + endpoint)) { + telemetryProxyEndpoint = endpoint; + break; + } + } + supportsLongRunning = Boolean.TRUE.equals(map.getOrDefault("long_running_spans", false)); if (metricsEnabled) { @@ -272,6 +285,10 @@ private boolean processInfoResponse(String response) { private static void discoverStatsDPort(final Map info) { try { Map config = (Map) info.get("config"); + if (config == null) { + log.debug("config missing from trace agent /info response"); + return; + } final Object statsdPortObj = config.get("statsd_port"); if (statsdPortObj == null) { log.debug("statsd_port missing from trace agent /info response"); @@ -279,8 +296,8 @@ private static void discoverStatsDPort(final Map info) { } int statsdPort = ((Number) statsdPortObj).intValue(); DDAgentStatsDClientManager.setDefaultStatsDPort(statsdPort); - } catch (Throwable ignore) { - log.debug("statsd_port missing from trace agent /info response", ignore); + } catch (Exception ex) { + log.debug("statsd_port missing from trace agent /info response", ex); } } @@ -348,4 +365,8 @@ public String state() { public boolean active() { return supportsMetrics() && supportsDropping; } + + public boolean supportsTelemetryProxy() { + return telemetryProxyEndpoint != null; + } } diff --git a/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDConnection.java b/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDConnection.java index 4a9e5113d1d..0455e2ec3ad 100644 --- a/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDConnection.java +++ b/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDConnection.java @@ -101,9 +101,7 @@ private void doConnect() { if (log.isDebugEnabled()) { log.debug("Creating StatsD client - {}", statsDAddress()); } - // when using UDS, set "entity-id" to "none" to avoid having the DogStatsD - // server add origin tags (see https://github.com/DataDog/jmxfetch/pull/264) - String entityID = port == 0 ? 
"none" : null; + NonBlockingStatsDClientBuilder clientBuilder = new NonBlockingStatsDClientBuilder() .threadFactory(STATSD_CLIENT_THREAD_FACTORY) @@ -112,12 +110,53 @@ private void doConnect() { .hostname(host) .port(port) .namedPipe(namedPipe) - .errorHandler(this) - .entityID(entityID); + .errorHandler(this); + + // when using UDS, set "entity-id" to "none" to avoid having the DogStatsD + // server add origin tags (see https://github.com/DataDog/jmxfetch/pull/264) + if (this.port == 0) { + clientBuilder.constantTags("dd.internal.card:none"); + clientBuilder.entityID("none"); + } else { + clientBuilder.entityID(null); + } + + Integer queueSize = Config.get().getStatsDClientQueueSize(); + if (queueSize != null) { + clientBuilder.queueSize(queueSize); + } + // when using UDS set the datagram size to 8k (2k on Mac due to lower OS default) + // but also make sure packet size isn't larger than the configured socket buffer if (this.port == 0) { - clientBuilder.maxPacketSizeBytes(Platform.isMac() ? 2048 : 8192); + Integer timeout = Config.get().getStatsDClientSocketTimeout(); + if (timeout != null) { + clientBuilder.timeout(timeout); + } + Integer bufferSize = Config.get().getStatsDClientSocketBuffer(); + if (bufferSize != null) { + clientBuilder.socketBufferSize(bufferSize); + } + int packetSize = Platform.isMac() ? 2048 : 8192; + if (bufferSize != null && bufferSize < packetSize) { + packetSize = bufferSize; + } + clientBuilder.maxPacketSizeBytes(packetSize); + } + + if (log.isDebugEnabled()) { + if (this.port == 0) { + log.debug( + "Configured StatsD client - queueSize={}, maxPacketSize={}, socketBuffer={}, socketTimeout={}", + clientBuilder.queueSize, + clientBuilder.maxPacketSizeBytes, + clientBuilder.socketBufferSize, + clientBuilder.timeout); + } else { + log.debug("Configured StatsD client - queueSize={}", clientBuilder.queueSize); + } } + try { statsd = clientBuilder.build(); if (log.isDebugEnabled()) { diff --git a/communication/src/test/groovy/datadog/communication/ddagent/DDAgentFeaturesDiscoveryTest.groovy b/communication/src/test/groovy/datadog/communication/ddagent/DDAgentFeaturesDiscoveryTest.groovy index af9b865d1b4..70b5519d355 100644 --- a/communication/src/test/groovy/datadog/communication/ddagent/DDAgentFeaturesDiscoveryTest.groovy +++ b/communication/src/test/groovy/datadog/communication/ddagent/DDAgentFeaturesDiscoveryTest.groovy @@ -15,7 +15,6 @@ import spock.lang.Shared import java.nio.file.Files import java.nio.file.Paths -import java.util.concurrent.CountDownLatch import static datadog.communication.ddagent.DDAgentFeaturesDiscovery.V01_DATASTREAMS_ENDPOINT import static datadog.communication.ddagent.DDAgentFeaturesDiscovery.V6_METRICS_ENDPOINT @@ -38,6 +37,7 @@ class DDAgentFeaturesDiscoveryTest extends DDSpecification { static final String INFO_WITHOUT_DATA_STREAMS_RESPONSE = loadJsonFile("agent-info-without-data-streams.json") static final String INFO_WITHOUT_DATA_STREAMS_STATE = Strings.sha256(INFO_WITHOUT_DATA_STREAMS_RESPONSE) static final String INFO_WITH_LONG_RUNNING_SPANS = loadJsonFile("agent-info-with-long-running-spans.json") + static final String INFO_WITH_TELEMETRY_PROXY_RESPONSE = loadJsonFile("agent-info-with-telemetry-proxy.json") static final String PROBE_STATE = "probestate" def "test parse /info response"() { @@ -62,6 +62,7 @@ class DDAgentFeaturesDiscoveryTest extends DDSpecification { features.supportsEvpProxy() features.getVersion() == "0.99.0" !features.supportsLongRunning() + !features.supportsTelemetryProxy() 0 * _ } @@ -89,6 +90,7 @@ class 
DDAgentFeaturesDiscoveryTest extends DDSpecification { features.supportsEvpProxy() features.getVersion() == "0.99.0" !features.supportsLongRunning() + !features.supportsTelemetryProxy() 0 * _ } @@ -384,17 +386,22 @@ class DDAgentFeaturesDiscoveryTest extends DDSpecification { // but we don't permit dropping anyway !(features as DroppingPolicy).active() features.state() == INFO_WITHOUT_METRICS_STATE + !features.supportsTelemetryProxy() 0 * _ } - def countingNotFound(Request request, CountDownLatch latch) { - latch.countDown() - return notFound(request) - } + def "test parse /info response with telemetry proxy"() { + setup: + OkHttpClient client = Mock(OkHttpClient) + DDAgentFeaturesDiscovery features = new DDAgentFeaturesDiscovery(client, monitoring, agentUrl, true, true) - def countingInfoResponse(Request request, String json, CountDownLatch latch) { - latch.countDown() - return infoResponse(request, json) + when: "/info available" + features.discover() + + then: + 1 * client.newCall(_) >> { Request request -> infoResponse(request, INFO_WITH_TELEMETRY_PROXY_RESPONSE) } + features.supportsTelemetryProxy() + 0 * _ } def infoResponse(Request request, String json) { diff --git a/communication/src/test/resources/agent-features/agent-info-with-telemetry-proxy.json b/communication/src/test/resources/agent-features/agent-info-with-telemetry-proxy.json new file mode 100644 index 00000000000..55e493e8316 --- /dev/null +++ b/communication/src/test/resources/agent-features/agent-info-with-telemetry-proxy.json @@ -0,0 +1,62 @@ +{ + "version": "0.99.0", + "git_commit": "fab047e10", + "build_date": "2020-12-04 15:57:06.74187 +0200 EET m=+0.029001792", + "endpoints": [ + "/v0.3/traces", + "/v0.3/services", + "/v0.4/traces", + "/v0.4/services", + "/v0.5/traces", + "/v0.6/stats", + "/profiling/v1/input", + "/telemetry/proxy/", + "/v0.1/pipeline_stats", + "/evp_proxy/v1/", + "/evp_proxy/v2/", + "/debugger/v1/input", + "/v0.7/config" + ], + "feature_flags": [ + "feature_flag" + ], + "config": { + "default_env": "prod", + "bucket_interval": 1000000000, + "extra_aggregators": [ + "agg:val" + ], + "extra_sample_rate": 2.4, + "target_tps": 11, + "max_eps": 12, + "receiver_port": 8111, + "receiver_socket": "/sock/path", + "connection_limit": 12, + "receiver_timeout": 100, + "max_request_bytes": 123, + "statsd_port": 123, + "max_memory": 1000000, + "max_cpu": 12345, + "analyzed_rate_by_service_legacy": { + "X": 1.2 + }, + "analyzed_spans_by_service": { + "X": { + "Y": 2.4 + } + }, + "obfuscation": { + "elastic_search": true, + "mongo": true, + "sql_exec_plan": true, + "sql_exec_plan_normalize": true, + "http": { + "remove_query_string": true, + "remove_path_digits": true + }, + "remove_stack_traces": false, + "redis": true, + "memcached": false + } + } +} diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/Agent.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/Agent.java index b45f2f655d5..a2343f12474 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/Agent.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/Agent.java @@ -150,7 +150,13 @@ public static void start(final Instrumentation inst, final URL agentJarURL, Stri createAgentClassloader(agentJarURL); if (Platform.isNativeImageBuilder()) { + // these default services are not used during native-image builds + jmxFetchEnabled = false; + remoteConfigEnabled = false; + telemetryEnabled = false; + // apply trace instrumentation, but skip starting other 
services startDatadogAgent(inst); + StaticEventLogger.end("Agent.start"); return; } @@ -335,6 +341,9 @@ public static void shutdown(final boolean sync) { if (profilingEnabled) { shutdownProfilingAgent(sync); } + if (telemetryEnabled) { + stopTelemetry(); + } } public static synchronized Class installAgentCLI() throws Exception { @@ -640,7 +649,7 @@ private static synchronized void registerDeadlockDetectionEvent() { private static synchronized void initializeJmxSystemAccessProvider( final ClassLoader classLoader) { if (log.isDebugEnabled()) { - log.debug("Initializing JMX system access provider for " + classLoader.toString()); + log.debug("Initializing JMX system access provider for {}", classLoader); } try { final Class tracerInstallerClass = @@ -781,6 +790,21 @@ private static void startTelemetry(Instrumentation inst, Class scoClass, Obje StaticEventLogger.end("Telemetry"); } + private static void stopTelemetry() { + if (AGENT_CLASSLOADER == null) { + return; + } + + try { + final Class telemetrySystem = + AGENT_CLASSLOADER.loadClass("datadog.telemetry.TelemetrySystem"); + final Method stopTelemetry = telemetrySystem.getMethod("stop"); + stopTelemetry.invoke(null); + } catch (final Throwable ex) { + log.error("Error encountered while stopping telemetry", ex); + } + } + private static void initializeCrashUploader() { if (Platform.isJ9()) { // TODO currently crash tracking is supported only for HotSpot based JVMs @@ -846,6 +870,11 @@ private static ProfilingContextIntegration createProfilingContextIntegration() { private static void startProfilingAgent(final boolean isStartingFirst) { StaticEventLogger.begin("ProfilingAgent"); + if (isAwsLambdaRuntime()) { + log.info("Profiling not supported in AWS Lambda runtimes"); + return; + } + final ClassLoader contextLoader = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(AGENT_CLASSLOADER); @@ -911,6 +940,11 @@ public void withTracer(TracerAPI tracer) { StaticEventLogger.end("ProfilingAgent"); } + private static boolean isAwsLambdaRuntime() { + String val = System.getenv("AWS_LAMBDA_FUNCTION_NAME"); + return val != null && !val.isEmpty(); + } + private static ScopeListener createScopeListener(String className) throws Throwable { return (ScopeListener) AGENT_CLASSLOADER.loadClass(className).getDeclaredConstructor().newInstance(); @@ -1036,7 +1070,7 @@ private static boolean isFeatureEnabled(AgentFeature feature) { } } - /** @see datadog.trace.api.ProductActivationConfig#fromString(String) */ + /** @see datadog.trace.api.ProductActivation#fromString(String) */ private static boolean isAppSecFullyDisabled() { // must be kept in sync with logic from Config! 
final String featureEnabledSysprop = AgentFeature.APPSEC.systemProp; diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/AgentJarIndex.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/AgentJarIndex.java index 0186fd56bdd..382f8708ac6 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/AgentJarIndex.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/AgentJarIndex.java @@ -81,7 +81,7 @@ public static AgentJarIndex readIndex(JarFile agentJar) { return new AgentJarIndex(prefixes, ClassNameTrie.readFrom(in)); } } catch (Throwable e) { - log.error("Unable to read " + AGENT_INDEX_FILE_NAME, e); + log.error("Unable to read {}", AGENT_INDEX_FILE_NAME, e); return null; } } diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/blocking/BlockingActionHelper.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/blocking/BlockingActionHelper.java index de05e3d3619..fbcb1062eb1 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/blocking/BlockingActionHelper.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/blocking/BlockingActionHelper.java @@ -211,27 +211,25 @@ public static void reset(Config config) { } private static byte[] readDefaultTemplate(String ext) { - InputStream is = + try (InputStream is = getSystemClassLoader() - .getResourceAsStream("datadog/trace/bootstrap/blocking/template." + ext); - if (is == null) { - log.error("Could not open default {} template", ext); - return new byte[] {'e', 'r', 'r', 'o', 'r'}; - } + .getResourceAsStream("datadog/trace/bootstrap/blocking/template." + ext)) { + if (is == null) { + log.error("Could not open default {} template", ext); + return new byte[] {'e', 'r', 'r', 'o', 'r'}; + } - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - byte[] b = new byte[8192]; - int read; - try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + byte[] b = new byte[8192]; + int read; while ((read = is.read(b)) != -1) { baos.write(b, 0, read); } + return baos.toByteArray(); } catch (IOException e) { log.error("Could not read default {} template", ext, e); return new byte[] {'e', 'r', 'r', 'o', 'r'}; } - - return baos.toByteArray(); } private static byte[] readIntoByteArray(File f) { diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/AsyncResultDecorator.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/AsyncResultDecorator.java new file mode 100644 index 00000000000..7bebcf8fdae --- /dev/null +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/AsyncResultDecorator.java @@ -0,0 +1,125 @@ +package datadog.trace.bootstrap.instrumentation.decorator; + +import static java.util.Collections.singletonList; + +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ExecutionException; +import java.util.function.BiConsumer; + +/** + * This decorator handles asynchronous result types, finishing spans only when the async calls are + * complete. 
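Out of the box, {@code CompletableFuture} and {@code CompletionStage} results are handled by a default java.util.concurrent extension.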
The different async types are supported using {@link AsyncResultSupportExtension} that + * should be registered using {@link #registerExtension(AsyncResultSupportExtension)} first. + */ +public abstract class AsyncResultDecorator extends BaseDecorator { + private static final CopyOnWriteArrayList EXTENSIONS = + new CopyOnWriteArrayList<>( + singletonList(new JavaUtilConcurrentAsyncResultSupportExtension())); + + private static final ClassValue EXTENSION_CLASS_VALUE = + new ClassValue() { + @Override + protected AsyncResultSupportExtension computeValue(Class type) { + return EXTENSIONS.stream() + .filter(extension -> extension.supports(type)) + .findFirst() + .orElse(null); + } + }; + + /** + * Registers an extension to add supported async types. + * + * @param extension The extension to register. + */ + public static void registerExtension(AsyncResultSupportExtension extension) { + if (extension != null) { + EXTENSIONS.add(extension); + } + } + + /** + * Look for asynchronous result and decorate it with span finisher. If the result is not + * asynchronous, it will be return unmodified and span will be finished. + * + * @param result The result to check type. + * @param span The related span to finish. + * @return An asynchronous result that will finish the span if the result is asynchronous, the + * original result otherwise. + */ + public Object wrapAsyncResultOrFinishSpan(final Object result, final AgentSpan span) { + AsyncResultSupportExtension extension; + if (result != null && (extension = EXTENSION_CLASS_VALUE.get(result.getClass())) != null) { + Object applied = extension.apply(result, span); + if (applied != null) { + return applied; + } + } + // If no extension was applied, immediately finish the span and return the original result + span.finish(); + return result; + } + + /** + * This interface defines asynchronous result type support extension. It allows deferring the + * support implementations where types are available on classpath. + */ + public interface AsyncResultSupportExtension { + /** + * Checks whether this extensions support a result type. + * + * @param result The result type to check. + * @return {@code true} if the type is supported by this extension, {@code false} otherwise. + */ + boolean supports(Class result); + + /** + * Applies the extension to the async result. + * + * @param result The async result. + * @param span The related span. + * @return The result object to return (can be the original result if not modified), or {@code + * null} if the extension could not be applied. 
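+ *
+ * <p>For illustration only, a minimal sketch of an extension for a hypothetical
+ * {@code MyAsyncResult} type (the type and its {@code whenDone} callback are examples,
+ * not an existing API):
+ * <pre>{@code
+ * AsyncResultDecorator.registerExtension(new AsyncResultSupportExtension() {
+ *   public boolean supports(Class<?> result) {
+ *     return MyAsyncResult.class.isAssignableFrom(result);
+ *   }
+ *   public Object apply(Object result, AgentSpan span) {
+ *     // finish the span once the hypothetical async result completes
+ *     ((MyAsyncResult) result).whenDone(span::finish);
+ *     return result;
+ *   }
+ * });
+ * }</pre>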
+ */ + Object apply(Object result, AgentSpan span); + } + + private static class JavaUtilConcurrentAsyncResultSupportExtension + implements AsyncResultSupportExtension { + @Override + public boolean supports(Class result) { + return CompletableFuture.class.isAssignableFrom(result) + || CompletionStage.class.isAssignableFrom(result); + } + + @Override + public Object apply(Object result, AgentSpan span) { + if (result instanceof CompletableFuture) { + CompletableFuture completableFuture = (CompletableFuture) result; + if (!completableFuture.isDone() && !completableFuture.isCancelled()) { + return completableFuture.whenComplete(finishSpan(span)); + } + } else if (result instanceof CompletionStage) { + CompletionStage completionStage = (CompletionStage) result; + return completionStage.whenComplete(finishSpan(span)); + } + return null; + } + + private BiConsumer finishSpan(AgentSpan span) { + return (o, throwable) -> { + if (throwable != null) { + span.addThrowable( + throwable instanceof ExecutionException || throwable instanceof CompletionException + ? throwable.getCause() + : throwable); + } + span.finish(); + }; + } + } +} diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecorator.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecorator.java index 6f967d1cfb4..03a8af08e5d 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecorator.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecorator.java @@ -21,7 +21,7 @@ protected static class NamingEntry { private NamingEntry(String rawDbType) { final NamingSchema.ForDatabase schema = SpanNaming.instance().namingSchema().database(); this.dbType = schema.normalizedName(rawDbType); - this.service = schema.service(Config.get().getServiceName(), dbType); + this.service = schema.service(dbType); this.operation = UTF8BytesString.create(schema.operation(dbType)); } @@ -72,6 +72,10 @@ public AgentSpan onConnection(final AgentSpan span, final CONNECTION connection) CharSequence hostName = dbHostname(connection); if (hostName != null) { span.setTag(Tags.PEER_HOSTNAME, hostName); + + if (Config.get().isDbClientSplitByHost()) { + span.setServiceName(hostName.toString()); + } } } return span; diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecorator.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecorator.java index 3991a262c25..334b2d665ba 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecorator.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecorator.java @@ -143,7 +143,7 @@ public AgentSpan startSpan( } AgentPropagation.ContextVisitor getter = getter(); if (null != carrier && null != getter) { - tracer().setDataStreamCheckpoint(span, SERVER_PATHWAY_EDGE_TAGS, 0); + tracer().getDataStreamsMonitoring().setCheckpoint(span, SERVER_PATHWAY_EDGE_TAGS, 0, 0); } return span; } @@ -250,7 +250,7 @@ public AgentSpan onRequest( } String inferredAddressStr = null; - if (clientIpResolverEnabled) { + if (clientIpResolverEnabled && context != null) { InetAddress inferredAddress = ClientIpAddressResolver.resolve(context, span); // 
the peer address should be used if: // 1. the headers yield nothing, regardless of whether it is public or not @@ -269,6 +269,17 @@ public AgentSpan onRequest( inferredAddressStr = inferredAddress.getHostAddress(); span.setTag(Tags.HTTP_CLIENT_IP, inferredAddressStr); } + } else if (clientIpResolverEnabled && span.getLocalRootSpan() != span) { + // in this case context == null + // If there is no context we can't do anything but use the peer addr. + // Additionally, context == null arises on subspans for which the resolution + // likely already happened on the top span, so we don't need to do the resolution + // again. Instead, copy from the top span, should it exist + AgentSpan localRootSpan = span.getLocalRootSpan(); + Object clientIp = localRootSpan.getTag(Tags.HTTP_CLIENT_IP); + if (clientIp != null) { + span.setTag(Tags.HTTP_CLIENT_IP, clientIp); + } } if (peerIp != null) { @@ -401,20 +412,22 @@ public AgentSpan onError(final AgentSpan span, final Throwable throwable) { } private Flow callIGCallbackRequestHeaders(AgentSpan span, REQUEST_CARRIER carrier) { - CallbackProvider cbp = tracer().getCallbackProvider(RequestContextSlot.APPSEC); + CallbackProvider cbp = tracer().getUniversalCallbackProvider(); RequestContext requestContext = span.getRequestContext(); AgentPropagation.ContextVisitor getter = getter(); - if (requestContext == null || cbp == null || getter == null) { + if (requestContext == null || getter == null) { return Flow.ResultFlow.empty(); } - IGKeyClassifier igKeyClassifier = - IGKeyClassifier.create( - requestContext, - cbp.getCallback(EVENTS.requestHeader()), - cbp.getCallback(EVENTS.requestHeaderDone())); - if (null != igKeyClassifier) { - getter.forEachKey(carrier, igKeyClassifier); - return igKeyClassifier.done(); + if (cbp != null) { + IGKeyClassifier igKeyClassifier = + IGKeyClassifier.create( + requestContext, + cbp.getCallback(EVENTS.requestHeader()), + cbp.getCallback(EVENTS.requestHeaderDone())); + if (null != igKeyClassifier) { + getter.forEachKey(carrier, igKeyClassifier); + return igKeyClassifier.done(); + } } return Flow.ResultFlow.empty(); } diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/java/concurrent/QueueTimerHelper.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/java/concurrent/QueueTimerHelper.java index 2e56354b1e2..2123ba369d4 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/java/concurrent/QueueTimerHelper.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/java/concurrent/QueueTimerHelper.java @@ -1,36 +1,26 @@ package datadog.trace.bootstrap.instrumentation.java.concurrent; -import static datadog.trace.bootstrap.TaskWrapper.getUnwrappedType; - import datadog.trace.api.profiling.QueueTiming; import datadog.trace.api.profiling.Timer; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import datadog.trace.bootstrap.instrumentation.jfr.InstrumentationBasedProfiling; public class QueueTimerHelper { public static void startQueuingTimer( ContextStore taskContextStore, Class schedulerClass, T task) { State state = taskContextStore.get(task); - if (task != null && state != null) { - QueueTiming timing = - (QueueTiming) AgentTracer.get().getTimer().start(Timer.TimerType.QUEUEING); - timing.setTask(getUnwrappedType(task)); - timing.setScheduler(schedulerClass); - state.setTiming(timing); - } - } - - public static 
Class unwrap(T task) { - return getUnwrappedType(task); + startQueuingTimer(state, schedulerClass, task); } - public static void startQueuingTimer( - State state, Class schedulerClass, Class unwrappedTaskClass, T task) { - if (task != null) { + public static void startQueuingTimer(State state, Class schedulerClass, Object task) { + // avoid calling this before JFR is initialised because it will lead to reading the wrong + // TSC frequency before JFR has set it up properly + if (task != null && state != null && InstrumentationBasedProfiling.isJFRReady()) { QueueTiming timing = (QueueTiming) AgentTracer.get().getTimer().start(Timer.TimerType.QUEUEING); - timing.setTask(unwrappedTaskClass); + timing.setTask(task); timing.setScheduler(schedulerClass); state.setTiming(timing); } diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/jdbc/JDBCConnectionUrlParser.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/jdbc/JDBCConnectionUrlParser.java index 9013329c6d6..e27b268fbcf 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/jdbc/JDBCConnectionUrlParser.java +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/jdbc/JDBCConnectionUrlParser.java @@ -747,6 +747,21 @@ DBInfo.Builder doParse(final String jdbcUrl, final DBInfo.Builder builder) { builder.host(details.substring(0, hostEndLoc)); + return builder; + } + }, + + REDSHIFT("redshift") { + @Override + DBInfo.Builder doParse(String jdbcUrl, DBInfo.Builder builder) { + builder = GENERIC_URL_LIKE.doParse(jdbcUrl, builder); + final DBInfo dbInfo = builder.build(); + if (dbInfo.getHost() != null) { + int firstDotLoc = dbInfo.getHost().indexOf('.'); + if (firstDotLoc > 0) { + builder.instance(dbInfo.getHost().substring(0, firstDotLoc)); + } + } return builder; } }; @@ -868,7 +883,7 @@ private static void populateStandardProperties( try { builder.port(Integer.parseInt(portNumber)); } catch (final NumberFormatException e) { - ExceptionLogger.LOGGER.debug("Error parsing portnumber property: " + portNumber, e); + ExceptionLogger.LOGGER.debug("Error parsing portnumber property: {}", portNumber, e); } } @@ -877,7 +892,7 @@ private static void populateStandardProperties( try { builder.port(Integer.parseInt(portNumber)); } catch (final NumberFormatException e) { - ExceptionLogger.LOGGER.debug("Error parsing portNumber property: " + portNumber, e); + ExceptionLogger.LOGGER.debug("Error parsing portNumber property: {}", portNumber, e); } } } diff --git a/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/messaging/DatadogAttributeParser.java b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/messaging/DatadogAttributeParser.java new file mode 100644 index 00000000000..37c6b6e868b --- /dev/null +++ b/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/messaging/DatadogAttributeParser.java @@ -0,0 +1,66 @@ +package datadog.trace.bootstrap.instrumentation.messaging; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import datadog.trace.api.Config; +import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; +import java.nio.ByteBuffer; +import java.util.Base64; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Parses trace context from an embedded '_datadog' message attribute. 
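+ *
+ * <p>An illustrative payload (example values only) looks like
+ * {@code {"x-datadog-trace-id":"1234567890","x-datadog-parent-id":"987654321","x-datadog-sampling-priority":"1"}};
+ * values are plain JSON strings, and the optional {@code dd-pathway-ctx-base64} entry is only read
+ * when data streams monitoring is enabled.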
*/ +public final class DatadogAttributeParser { + private static final Logger log = LoggerFactory.getLogger(DatadogAttributeParser.class); + + private static final Base64.Decoder BASE_64 = Base64.getDecoder(); + + /** Parses trace context properties from the given JSON and passes them to the classifier. */ + public static void forEachProperty(AgentPropagation.KeyClassifier classifier, String json) { + if (null == json) { + return; + } + try { + if (acceptJsonProperty(classifier, json, "x-datadog-trace-id")) { + acceptJsonProperty(classifier, json, "x-datadog-parent-id"); + acceptJsonProperty(classifier, json, "x-datadog-sampling-priority"); + } + if (Config.get().isDataStreamsEnabled()) { + acceptJsonProperty(classifier, json, "dd-pathway-ctx-base64"); + } + } catch (Exception e) { + log.debug("Problem extracting _datadog context", e); + } + } + + /** Parses trace context properties from the given JSON and passes them to the classifier. */ + public static void forEachProperty(AgentPropagation.KeyClassifier classifier, ByteBuffer json) { + if (null == json) { + return; + } + try { + forEachProperty(classifier, UTF_8.decode(BASE_64.decode(json)).toString()); + } catch (Exception e) { + log.debug("Problem decoding _datadog context", e); + } + } + + // Simple parser that assumes values are JSON strings that don't contain escaped quotes + private static boolean acceptJsonProperty( + AgentPropagation.KeyClassifier classifier, String json, String key) { + int keyStart = json.indexOf(key); + if (keyStart > 0) { + int separator = json.indexOf(':', keyStart + key.length()); + if (separator > 0) { + int valueStart = json.indexOf('"', separator + 1); + if (valueStart > 0) { + int valueEnd = json.indexOf('"', valueStart + 1); + if (valueEnd > 0) { + return classifier.accept(key, json.substring(valueStart + 1, valueEnd)); + } + } + } + } + return false; + } +} diff --git a/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/WindowSampler.java b/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/WindowSampler.java index f48afc17856..f6489a29c2f 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/WindowSampler.java +++ b/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/WindowSampler.java @@ -13,10 +13,14 @@ public class WindowSampler { protected WindowSampler( Duration windowDuration, int samplesPerWindow, int lookback, Class eventType) { - sampler = new AdaptiveSampler(windowDuration, samplesPerWindow, lookback, 16); + sampler = new AdaptiveSampler(windowDuration, samplesPerWindow, lookback, 16, false); sampleType = EventType.getEventType(eventType); } + public void start() { + sampler.start(); + } + public boolean sample() { return sampleType.isEnabled() && sampler.sample(); } diff --git a/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionProfiling.java b/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionProfiling.java index 0cabe247278..d147017aedd 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionProfiling.java +++ b/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionProfiling.java @@ -42,13 +42,17 @@ private ExceptionProfiling(final Config config) { this.recordExceptionMessage = 
recordExceptionMessage; } - public ExceptionSampleEvent process(final Throwable t, final int stackDepth) { + public void start() { + sampler.start(); + } + + public ExceptionSampleEvent process(final Throwable t) { // always record the exception in histogram final boolean firstHit = histogram.record(t); final boolean sampled = sampler.sample(); if (firstHit || sampled) { - return new ExceptionSampleEvent(t, stackDepth, sampled, firstHit); + return new ExceptionSampleEvent(t, sampled, firstHit); } return null; } diff --git a/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionSampleEvent.java b/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionSampleEvent.java index 57de3176207..7dfbeaedb01 100644 --- a/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionSampleEvent.java +++ b/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionSampleEvent.java @@ -18,10 +18,6 @@ public class ExceptionSampleEvent extends Event implements ContextualEvent { @Label("Exception message") private final String message; - /** JFR may truncate the stack trace - so store original length as well. */ - @Label("Exception stackdepth") - private final int stackDepth; - @Label("Sampled") private final boolean sampled; @@ -34,8 +30,7 @@ public class ExceptionSampleEvent extends Event implements ContextualEvent { @Label("Span Id") private long spanId; - public ExceptionSampleEvent( - Throwable e, final int stackDepth, boolean sampled, boolean firstOccurrence) { + public ExceptionSampleEvent(Throwable e, boolean sampled, boolean firstOccurrence) { /* * TODO: we should have some tests for this class. 
* Unfortunately at the moment this is not easily possible because we cannot build tests with groovy that @@ -44,7 +39,6 @@ public ExceptionSampleEvent( */ this.type = e.getClass().getName(); this.message = getMessage(e); - this.stackDepth = stackDepth; this.sampled = sampled; this.firstOccurrence = firstOccurrence; captureContext(); diff --git a/dd-java-agent/agent-bootstrap/src/main/resources/META-INF/native-image/com.datadoghq/dd-java-agent/reflect-config.json b/dd-java-agent/agent-bootstrap/src/main/resources/META-INF/native-image/com.datadoghq/dd-java-agent/reflect-config.json index dc208ba30b5..bf4992ce635 100644 --- a/dd-java-agent/agent-bootstrap/src/main/resources/META-INF/native-image/com.datadoghq/dd-java-agent/reflect-config.json +++ b/dd-java-agent/agent-bootstrap/src/main/resources/META-INF/native-image/com.datadoghq/dd-java-agent/reflect-config.json @@ -29,6 +29,12 @@ {"name": "valueOf", "parameterTypes": ["java.lang.String"]} ] }, + { + "name" : "java.util.concurrent.ConcurrentHashMap", + "methods": [ + {"name": "", "parameterTypes": []} + ] + }, { "name" : "datadog.trace.agent.common.sampling.SpanSamplingRules$RuleAdapter", "methods": [ @@ -83,5 +89,17 @@ {"name": "consumerIndex", "allowUnsafeAccess": true}, {"name": "blocked", "allowUnsafeAccess": true} ] + }, + { + "name" : "org.jctools.queues.SpscArrayQueueProducerIndexFields", + "fields": [ + {"name": "producerIndex", "allowUnsafeAccess": true} + ] + }, + { + "name" : "org.jctools.queues.SpscArrayQueueConsumerIndexField", + "fields": [ + {"name": "consumerIndex", "allowUnsafeAccess": true} + ] } ] diff --git a/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecoratorTest.groovy b/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecoratorTest.groovy index de3d8f0291d..0269f51d5a8 100644 --- a/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecoratorTest.groovy +++ b/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/DatabaseClientDecoratorTest.groovy @@ -4,6 +4,7 @@ import datadog.trace.api.DDTags import datadog.trace.bootstrap.instrumentation.api.AgentSpan import datadog.trace.bootstrap.instrumentation.api.Tags +import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_HOST import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE_TYPE_SUFFIX @@ -35,8 +36,9 @@ class DatabaseClientDecoratorTest extends ClientDecoratorTest { def "test onConnection"() { setup: - injectSysConfig(DB_CLIENT_HOST_SPLIT_BY_INSTANCE, "$renameService") - injectSysConfig(DB_CLIENT_HOST_SPLIT_BY_INSTANCE_TYPE_SUFFIX, "$typeSuffix") + injectSysConfig(DB_CLIENT_HOST_SPLIT_BY_INSTANCE, "$renameByInstance") + injectSysConfig(DB_CLIENT_HOST_SPLIT_BY_INSTANCE_TYPE_SUFFIX, "$instanceTypeSuffix") + injectSysConfig(DB_CLIENT_HOST_SPLIT_BY_HOST, "$renameByHost") def decorator = newDecorator() when: @@ -49,24 +51,34 @@ class DatabaseClientDecoratorTest extends ClientDecoratorTest { if (session.hostname != null) { 1 * span.setTag(Tags.PEER_HOSTNAME, session.hostname) } - if (typeSuffix && renameService && session.instance) { + if (instanceTypeSuffix && renameByInstance && session.instance) { 1 * span.setServiceName(session.instance + "-" + decorator.dbType()) - } 
else if (renameService && session.instance) { + } else if (renameByInstance && session.instance) { 1 * span.setServiceName(session.instance) + } else if (renameByHost) { + 1 * span.setServiceName(session.hostname) } } 0 * _ where: - renameService | typeSuffix | session - false | false | null - true | false | [user: "test-user", hostname: "test-hostname"] - false | false | [instance: "test-instance", hostname: "test-hostname"] - true | false | [user: "test-user", instance: "test-instance"] - false | true | null - true | true | [user: "test-user", hostname: "test-hostname"] - false | true | [instance: "test-instance", hostname: "test-hostname"] - true | true | [user: "test-user", instance: "test-instance"] + renameByInstance | instanceTypeSuffix | renameByHost | session + false | false | false | null + true | false | false | [user: "test-user", hostname: "test-hostname"] + false | false | false | [instance: "test-instance", hostname: "test-hostname"] + true | false | false | [user: "test-user", instance: "test-instance"] + false | true | false | null + true | true | false | [user: "test-user", hostname: "test-hostname"] + false | true | false | [instance: "test-instance", hostname: "test-hostname"] + true | true | false | [user: "test-user", instance: "test-instance"] + false | false | true | null + true | false | true | [user: "test-user", hostname: "test-hostname"] + false | false | true | [instance: "test-instance", hostname: "test-hostname"] + true | false | true | [user: "test-user", instance: "test-instance"] + false | true | true | null + true | true | true | [user: "test-user", hostname: "test-hostname"] + false | true | true | [instance: "test-instance", hostname: "test-hostname"] + true | true | true | [user: "test-user", instance: "test-instance"] } def "test onStatement"() { diff --git a/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecoratorTest.groovy b/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecoratorTest.groovy index 5bb4828a985..fcbba105a5a 100644 --- a/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecoratorTest.groovy +++ b/dd-java-agent/agent-bootstrap/src/test/groovy/datadog/trace/bootstrap/instrumentation/decorator/HttpServerDecoratorTest.groovy @@ -18,10 +18,12 @@ import datadog.trace.bootstrap.instrumentation.api.ResourceNamePriorities import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.bootstrap.instrumentation.api.URIDataAdapter import datadog.trace.bootstrap.instrumentation.api.URIDefaultDataAdapter +import datadog.trace.core.datastreams.DataStreamsMonitoring import java.util.function.Function import java.util.function.Supplier +import static datadog.trace.api.config.TraceInstrumentationConfig.HTTP_SERVER_DECODED_RESOURCE_PRESERVE_SPACES import static datadog.trace.api.config.TraceInstrumentationConfig.HTTP_SERVER_RAW_QUERY_STRING import static datadog.trace.api.config.TraceInstrumentationConfig.HTTP_SERVER_RAW_RESOURCE import static datadog.trace.api.config.TraceInstrumentationConfig.HTTP_SERVER_TAG_QUERY_STRING @@ -62,6 +64,7 @@ class HttpServerDecoratorTest extends ServerDecoratorTest { } else { 1 * this.span.getRequestContext() } + _ * this.span.getLocalRootSpan() >> this.span 0 * _ where: @@ -101,6 +104,7 @@ class HttpServerDecoratorTest extends ServerDecoratorTest { 1 * this.span.setResourceName({ it as String == expectedPath }) } 1 * 
this.span.setTag(Tags.HTTP_METHOD, null) + _ * this.span.getLocalRootSpan() >> this.span 0 * _ where: @@ -140,18 +144,34 @@ class HttpServerDecoratorTest extends ServerDecoratorTest { 2 * this.span.getRequestContext() 1 * this.span.setResourceName({ it as String == expectedResource }, ResourceNamePriorities.HTTP_PATH_NORMALIZER) 1 * this.span.setTag(Tags.HTTP_METHOD, null) + _ * this.span.getLocalRootSpan() >> this.span 0 * _ where: rawQuery | rawResource | url | expectedUrl | expectedQuery | expectedResource - false | false | "http://host/p%20ath?query%3F?" | "http://host/p ath" | "query??" | "/path" + false | false | "http://host/p%20ath?query%3F?" | "http://host/p ath" | "query??" | "/p ath" false | true | "http://host/p%20ath?query%3F?" | "http://host/p%20ath" | "query??" | "/p%20ath" - true | false | "http://host/p%20ath?query%3F?" | "http://host/p ath" | "query%3F?" | "/path" + true | false | "http://host/p%20ath?query%3F?" | "http://host/p ath" | "query%3F?" | "/p ath" true | true | "http://host/p%20ath?query%3F?" | "http://host/p%20ath" | "query%3F?" | "/p%20ath" req = [url: url == null ? null : new URI(url)] } + void 'url handling without space preservation'() { + setup: + injectSysConfig(HTTP_SERVER_RAW_RESOURCE, 'false') + injectSysConfig(HTTP_SERVER_DECODED_RESOURCE_PRESERVE_SPACES, 'false') + def decorator = newDecorator() + + when: + decorator.onRequest(this.span, null, [url: new URI('http://host/p%20ath')], null) + + then: + 1 * this.span.setResourceName({ it as String == '/path' }, ResourceNamePriorities.HTTP_PATH_NORMALIZER) + _ * this.span.getLocalRootSpan() >> this.span + _ * _ + } + def "test onConnection"() { setup: def ctx = Mock(AgentSpan.Context.Extracted) @@ -268,6 +288,7 @@ class HttpServerDecoratorTest extends ServerDecoratorTest { when: decorator.onRequest(this.span, [peerIp: '4.4.4.4'], null, ctx) + _ * this.span.getLocalRootSpan() >> this.span then: 2 * ctx.getXForwardedFor() >> '2.3.4.5' 1 * this.span.setTag(Tags.HTTP_CLIENT_IP, '2.3.4.5') @@ -418,10 +439,13 @@ class HttpServerDecoratorTest extends ServerDecoratorTest { def mSpan = Mock(AgentSpan) { getRequestContext() >> reqCtxt } + def mTracer = Mock(TracerAPI) { startSpan(_, _, _) >> mSpan getCallbackProvider(RequestContextSlot.APPSEC) >> cbpAppSec getCallbackProvider(RequestContextSlot.IAST) >> CallbackProvider.CallbackProviderNoop.INSTANCE + getUniversalCallbackProvider() >> cbpAppSec // no iast callbacks, so this is equivalent + getDataStreamsMonitoring() >> Mock(DataStreamsMonitoring) } def decorator = newDecorator(mTracer) diff --git a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentInstaller.java b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentInstaller.java index 9d5b176949c..fe4673541e3 100644 --- a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentInstaller.java +++ b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentInstaller.java @@ -3,11 +3,11 @@ import static datadog.trace.agent.tooling.bytebuddy.matcher.GlobalIgnoresMatcher.globalIgnoresMatcher; import static net.bytebuddy.matcher.ElementMatchers.isDefaultFinalizer; -import datadog.trace.agent.tooling.bytebuddy.DDCachingPoolStrategy; -import datadog.trace.agent.tooling.bytebuddy.DDOutlinePoolStrategy; import datadog.trace.agent.tooling.bytebuddy.SharedTypePools; +import datadog.trace.agent.tooling.bytebuddy.iast.TaintableRedefinitionStrategyListener; import datadog.trace.agent.tooling.bytebuddy.matcher.DDElementMatchers; import 
datadog.trace.agent.tooling.bytebuddy.memoize.MemoizedMatchers; +import datadog.trace.agent.tooling.bytebuddy.outline.TypePoolFacade; import datadog.trace.agent.tooling.usm.UsmExtractorImpl; import datadog.trace.agent.tooling.usm.UsmMessageFactoryImpl; import datadog.trace.api.InstrumenterConfig; @@ -96,11 +96,7 @@ public static ClassFileTransformer installBytebuddyAgent( final AgentBuilder.Listener... listeners) { Utils.setInstrumentation(inst); - if (InstrumenterConfig.get().isResolverOutliningEnabled()) { - DDOutlinePoolStrategy.registerTypePoolFacade(); - } else { - DDCachingPoolStrategy.registerAsSupplier(); - } + TypePoolFacade.registerAsSupplier(); if (InstrumenterConfig.get().isResolverMemoizingEnabled()) { MemoizedMatchers.registerAsSupplier(); @@ -124,6 +120,7 @@ public static ClassFileTransformer installBytebuddyAgent( .with(AgentStrategies.transformerDecorator()) .with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION) .with(AgentStrategies.rediscoveryStrategy()) + .with(redefinitionStrategyListener(enabledSystems)) .with(AgentStrategies.locationStrategy()) .with(AgentStrategies.poolStrategy()) .with(AgentBuilder.DescriptionStrategy.Default.POOL_ONLY) @@ -140,6 +137,7 @@ public static ClassFileTransformer installBytebuddyAgent( agentBuilder .with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION) .with(AgentStrategies.rediscoveryStrategy()) + .with(redefinitionStrategyListener(enabledSystems)) .with(new RedefinitionLoggingListener()) .with(new TransformLoggingListener()); } @@ -260,6 +258,15 @@ private static void addByteBuddyRawSetting() { } } + private static AgentBuilder.RedefinitionStrategy.Listener redefinitionStrategyListener( + final Set enabledSystems) { + if (enabledSystems.contains(Instrumenter.TargetSystem.IAST)) { + return TaintableRedefinitionStrategyListener.INSTANCE; + } else { + return AgentBuilder.RedefinitionStrategy.Listener.NoOp.INSTANCE; + } + } + static class RedefinitionLoggingListener implements AgentBuilder.RedefinitionStrategy.Listener { private static final Logger log = LoggerFactory.getLogger(RedefinitionLoggingListener.class); diff --git a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentStrategies.java b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentStrategies.java index 124fcf3434c..23309b282a6 100644 --- a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentStrategies.java +++ b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/AgentStrategies.java @@ -1,12 +1,10 @@ package datadog.trace.agent.tooling; -import datadog.trace.agent.tooling.bytebuddy.DDCachingPoolStrategy; import datadog.trace.agent.tooling.bytebuddy.DDClassFileTransformer; import datadog.trace.agent.tooling.bytebuddy.DDLocationStrategy; import datadog.trace.agent.tooling.bytebuddy.DDOutlinePoolStrategy; import datadog.trace.agent.tooling.bytebuddy.DDOutlineTypeStrategy; import datadog.trace.agent.tooling.bytebuddy.DDRediscoveryStrategy; -import datadog.trace.api.InstrumenterConfig; import datadog.trace.api.Platform; import net.bytebuddy.agent.builder.AgentBuilder.ClassFileBufferStrategy; import net.bytebuddy.agent.builder.AgentBuilder.LocationStrategy; @@ -45,21 +43,9 @@ private static TransformerDecorator loadTransformerDecorator() { private static final DiscoveryStrategy REDISCOVERY_STRATEGY = new DDRediscoveryStrategy(); private static final LocationStrategy LOCATION_STRATEGY = new DDLocationStrategy(); - private static final PoolStrategy POOL_STRATEGY; - private static final 
ClassFileBufferStrategy BUFFER_STRATEGY; - private static final TypeStrategy TYPE_STRATEGY; - - static { - if (InstrumenterConfig.get().isResolverOutliningEnabled()) { - POOL_STRATEGY = DDOutlinePoolStrategy.INSTANCE; - BUFFER_STRATEGY = DDOutlineTypeStrategy.INSTANCE; - TYPE_STRATEGY = DDOutlineTypeStrategy.INSTANCE; - } else { - POOL_STRATEGY = DDCachingPoolStrategy.INSTANCE; - BUFFER_STRATEGY = ClassFileBufferStrategy.Default.RETAINING; - TYPE_STRATEGY = TypeStrategy.Default.REDEFINE_FROZEN; - } - } + private static final PoolStrategy POOL_STRATEGY = DDOutlinePoolStrategy.INSTANCE; + private static final ClassFileBufferStrategy BUFFER_STRATEGY = DDOutlineTypeStrategy.INSTANCE; + private static final TypeStrategy TYPE_STRATEGY = DDOutlineTypeStrategy.INSTANCE; public static TransformerDecorator transformerDecorator() { return TRANSFORMER_DECORATOR; diff --git a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/CombiningTransformerBuilder.java b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/CombiningTransformerBuilder.java index e4aa7004a1d..e1cb97d7c14 100644 --- a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/CombiningTransformerBuilder.java +++ b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/CombiningTransformerBuilder.java @@ -7,6 +7,7 @@ import static net.bytebuddy.matcher.ElementMatchers.isSynthetic; import static net.bytebuddy.matcher.ElementMatchers.not; +import datadog.trace.agent.tooling.Instrumenter.WithPostProcessor; import datadog.trace.agent.tooling.bytebuddy.ExceptionHandlers; import datadog.trace.agent.tooling.context.FieldBackedContextInjector; import datadog.trace.agent.tooling.context.FieldBackedContextMatcher; @@ -22,6 +23,7 @@ import java.util.List; import java.util.Map; import net.bytebuddy.agent.builder.AgentBuilder; +import net.bytebuddy.asm.Advice; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.matcher.ElementMatcher; @@ -38,6 +40,12 @@ public final class CombiningTransformerBuilder extends AbstractTransformerBuilde private final List advice = new ArrayList<>(); private ElementMatcher ignoredMethods; + /** + * Post processor to be applied to instrumenter advices if they implement {@link + * WithPostProcessor} + */ + private Advice.PostProcessor.Factory postProcessor; + public CombiningTransformerBuilder(AgentBuilder agentBuilder, int maxInstrumentationId) { this.agentBuilder = agentBuilder; int maxInstrumentationCount = maxInstrumentationId + 1; @@ -108,6 +116,11 @@ private void buildInstrumentationMatcher(Instrumenter.Default instrumenter, int private void buildInstrumentationAdvice(Instrumenter.Default instrumenter, int id) { + postProcessor = + instrumenter instanceof WithPostProcessor + ? 
((WithPostProcessor) instrumenter).postProcessor() + : null; + String[] helperClassNames = instrumenter.helperClassNames(); if (instrumenter.injectHelperDependencies()) { helperClassNames = HelperScanner.withClassDependencies(helperClassNames); @@ -159,8 +172,12 @@ protected void buildSingleAdvice(Instrumenter.ForSingleType instrumenter) { @Override public void applyAdvice(ElementMatcher matcher, String name) { + Advice.WithCustomMapping customMapping = Advice.withCustomMapping(); + if (postProcessor != null) { + customMapping = customMapping.with(postProcessor); + } advice.add( - new AgentBuilder.Transformer.ForAdvice() + new AgentBuilder.Transformer.ForAdvice(customMapping) .include(Utils.getBootstrapProxy(), Utils.getAgentClassLoader()) .withExceptionHandler(ExceptionHandlers.defaultExceptionHandler()) .advice(not(ignoredMethods).and(matcher), name)); diff --git a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDCachingPoolStrategy.java b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDCachingPoolStrategy.java deleted file mode 100644 index f990030aa72..00000000000 --- a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDCachingPoolStrategy.java +++ /dev/null @@ -1,452 +0,0 @@ -package datadog.trace.agent.tooling.bytebuddy; - -import static datadog.trace.agent.tooling.bytebuddy.ClassFileLocators.classFileLocator; -import static datadog.trace.bootstrap.AgentClassLoading.LOCATING_CLASS; - -import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; -import datadog.trace.api.InstrumenterConfig; -import datadog.trace.api.cache.DDCache; -import datadog.trace.api.cache.DDCaches; -import java.lang.ref.WeakReference; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; -import net.bytebuddy.agent.builder.AgentBuilder; -import net.bytebuddy.description.annotation.AnnotationList; -import net.bytebuddy.description.method.MethodDescription; -import net.bytebuddy.description.method.MethodList; -import net.bytebuddy.description.type.TypeDescription; -import net.bytebuddy.description.type.TypeList; -import net.bytebuddy.dynamic.ClassFileLocator; -import net.bytebuddy.pool.TypePool; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * NEW (Jan 2020) Custom Pool strategy. - * - *
    - * Uses a Guava Cache directly...
 - *   - better control over locking than WeakMap.Provider
 - *   - provides direct control over concurrency level
 - *   - initial and maximum capacity
 - *
 - * There two core parts to the cache...
 - *   - a cache of ClassLoader to WeakReference<ClassLoader>
 - *   - a single cache of TypeResolutions for all ClassLoaders - keyed by a custom composite key of
 - *     ClassLoader & class name
 - *
 - * This design was chosen to create a single limited size cache that can be adjusted for the
 - * entire application -- without having to create a large number of WeakReference objects.
 - *
 - *
Eviction is handled almost entirely through a size restriction; however, softValues are still - * used as a further safeguard. - */ -public final class DDCachingPoolStrategy - implements AgentBuilder.PoolStrategy, SharedTypePools.Supplier { - private static final Logger log = LoggerFactory.getLogger(DDCachingPoolStrategy.class); - // Many things are package visible for testing purposes -- - // others to avoid creation of synthetic accessors - - static final int CONCURRENCY_LEVEL = 8; - static final int LOADER_CAPACITY = 64; - static final int TYPE_CAPACITY = InstrumenterConfig.get().getResolverTypePoolSize(); - - static final int BOOTSTRAP_HASH = 7236344; // Just a random number - - private static final Function> WEAK_REF = - WeakReference::new; - - public static final DDCachingPoolStrategy INSTANCE = - new DDCachingPoolStrategy(InstrumenterConfig.get().isResolverUseLoadClass()); - - public static void registerAsSupplier() { - SharedTypePools.registerIfAbsent(INSTANCE); - } - - /** - * Cache of recent ClassLoader WeakReferences; used to... - * - *

 - *   - Reduced number of WeakReferences created
 - *   - Allow for quick fast path equivalence check of composite keys
- */ - final DDCache> loaderRefCache = - DDCaches.newFixedSizeWeakKeyCache(LOADER_CAPACITY); - - /** - * Single shared Type.Resolution cache -- uses a composite key -- conceptually of loader & name - */ - final ConcurrentMap sharedResolutionCache = - new ConcurrentLinkedHashMap.Builder() - .maximumWeightedCapacity(TYPE_CAPACITY) - .concurrencyLevel(CONCURRENCY_LEVEL) - .build(); - - /** Fast path for bootstrap */ - final SharedResolutionCacheAdapter bootstrapCacheProvider; - - private final boolean fallBackToLoadClass; - - // visible for testing - DDCachingPoolStrategy() { - this(true); - } - - private DDCachingPoolStrategy(boolean fallBackToLoadClass) { - this.fallBackToLoadClass = fallBackToLoadClass; - bootstrapCacheProvider = - new SharedResolutionCacheAdapter( - BOOTSTRAP_HASH, null, sharedResolutionCache, fallBackToLoadClass); - } - - @Override - public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { - if (classLoader == null) { - return createCachingTypePool(bootstrapCacheProvider, classFileLocator); - } - - WeakReference loaderRef = loaderRefCache.computeIfAbsent(classLoader, WEAK_REF); - - final int loaderHash = classLoader.hashCode(); - return createCachingTypePool(loaderHash, loaderRef, classFileLocator); - } - - @Override - public TypePool typePool( - ClassFileLocator classFileLocator, ClassLoader classLoader, String name) { - // FIXME satisfy interface constraint that currently instrumented type is not cached - return typePool(classFileLocator, classLoader); - } - - @Override - public TypePool typePool(ClassLoader classLoader) { - return typePool(classFileLocator(classLoader), classLoader); - } - - @Override - public void annotationOfInterest(String name) {} - - @Override - public void endInstall() {} - - @Override - public void endTransform() {} - - @Override - public void clear() { - sharedResolutionCache.clear(); - } - - private TypePool.CacheProvider createCacheProvider( - final int loaderHash, final WeakReference loaderRef) { - return new SharedResolutionCacheAdapter( - loaderHash, loaderRef, sharedResolutionCache, fallBackToLoadClass); - } - - private TypePool createCachingTypePool( - final int loaderHash, - final WeakReference loaderRef, - final ClassFileLocator classFileLocator) { - return new TypePool.Default.WithLazyResolution( - createCacheProvider(loaderHash, loaderRef), - classFileLocator, - TypePool.Default.ReaderMode.FAST); - } - - private TypePool createCachingTypePool( - final TypePool.CacheProvider cacheProvider, final ClassFileLocator classFileLocator) { - return new TypePool.Default.WithLazyResolution( - cacheProvider, classFileLocator, TypePool.Default.ReaderMode.FAST); - } - - final long approximateSize() { - return sharedResolutionCache.size(); - } - - /** - * TypeCacheKey is key for the sharedResolutionCache. Conceptually, it is a mix of ClassLoader & - * class name. - * - *

For efficiency & GC purposes, it is actually composed of loaderHash & - * WeakReference<ClassLoader> - * - *

The loaderHash exists to avoid calling get & strengthening the Reference. - */ - static final class TypeCacheKey { - private final int loaderHash; - private final WeakReference loaderRef; - private final String className; - - private final int hashCode; - - TypeCacheKey( - final int loaderHash, final WeakReference loaderRef, final String className) { - this.loaderHash = loaderHash; - this.loaderRef = loaderRef; - this.className = className; - - hashCode = 31 * this.loaderHash + className.hashCode(); - } - - @Override - public final int hashCode() { - return hashCode; - } - - @Override - public boolean equals(final Object obj) { - if (!(obj instanceof TypeCacheKey)) { - return false; - } - - final TypeCacheKey that = (TypeCacheKey) obj; - - if (loaderHash != that.loaderHash) { - return false; - } - - if (className.equals(that.className)) { - // Fastpath loaderRef equivalence -- works because of WeakReference cache used - // Also covers the bootstrap null loaderRef case - if (loaderRef == that.loaderRef) { - return true; - } - - // need to perform a deeper loader check -- requires calling Reference.get - // which can strengthen the Reference, so deliberately done last - - // If either reference has gone null, they aren't considered equivalent - // Technically, this is a bit of violation of equals semantics, since - // two equivalent references can become not equivalent. - - // In this case, it is fine because that means the ClassLoader is no - // longer live, so the entries will never match anyway and will fall - // out of the cache. - final ClassLoader thisLoader = loaderRef.get(); - if (thisLoader == null) { - return false; - } - - final ClassLoader thatLoader = that.loaderRef.get(); - if (thatLoader == null) { - return false; - } - - return (thisLoader == thatLoader); - } else { - return false; - } - } - } - - static final class SharedResolutionCacheAdapter implements TypePool.CacheProvider { - private static final String OBJECT_NAME = "java.lang.Object"; - private static final TypePool.Resolution OBJECT_RESOLUTION = - new TypePool.Resolution.Simple( - new CachingTypeDescription(TypeDescription.ForLoadedType.of(Object.class))); - - private final int loaderHash; - private final WeakReference loaderRef; - private final ConcurrentMap sharedResolutionCache; - private final boolean fallBackToLoadClass; - - SharedResolutionCacheAdapter( - final int loaderHash, - final WeakReference loaderRef, - final ConcurrentMap sharedResolutionCache, - final boolean fallBackToLoadClass) { - this.loaderHash = loaderHash; - this.loaderRef = loaderRef; - this.sharedResolutionCache = sharedResolutionCache; - this.fallBackToLoadClass = fallBackToLoadClass; - } - - @Override - public TypePool.Resolution find(final String className) { - final TypePool.Resolution existingResolution = - sharedResolutionCache.get(new TypeCacheKey(loaderHash, loaderRef, className)); - if (existingResolution != null) { - return existingResolution; - } - - if (OBJECT_NAME.equals(className)) { - return OBJECT_RESOLUTION; - } - - return null; - } - - @Override - public TypePool.Resolution register(final String className, TypePool.Resolution resolution) { - if (OBJECT_NAME.equals(className)) { - return resolution; - } - - if (fallBackToLoadClass && resolution instanceof TypePool.Resolution.Illegal) { - // If the normal pool only resolution have failed then fall back to creating the type - // description from a loaded type by trying to load the class. 
This case is very rare and is - // here to handle classes that are injected directly via calls to defineClass without - // providing a way to get the class bytes. - resolution = new CachingResolutionForMaybeLoadableType(loaderRef, className); - } else { - resolution = new CachingResolution(resolution); - } - - sharedResolutionCache.put(new TypeCacheKey(loaderHash, loaderRef, className), resolution); - return resolution; - } - - @Override - public void clear() { - // Allowing the high-level eviction policy make the clearing decisions - } - } - - private static class CachingResolutionForMaybeLoadableType implements TypePool.Resolution { - private final WeakReference loaderRef; - private final String className; - private volatile TypeDescription typeDescription = null; - private volatile boolean isResolved = false; - - public CachingResolutionForMaybeLoadableType( - WeakReference loaderRef, String className) { - this.loaderRef = loaderRef; - this.className = className; - } - - @Override - public boolean isResolved() { - return isResolved; - } - - @Override - public TypeDescription resolve() { - // Intentionally not "thread safe". Duplicate work deemed an acceptable trade-off. - if (!isResolved) { - Class klass = null; - ClassLoader classLoader = null; - LOCATING_CLASS.begin(); - try { - // Please note that by doing a loadClass, the type we are resolving will bypass - // transformation since we are in the middle of a transformation. This should - // be a very rare occurrence and not affect any classes we want to instrument. - if (loaderRef != null) { - classLoader = loaderRef.get(); - if (classLoader != null) { - klass = classLoader.loadClass(className); - } else { - // classloader has been unloaded - } - } else { // bootstrap type resolution - klass = Class.forName(className, false, null); - } - } catch (Throwable ignored) { - } finally { - LOCATING_CLASS.end(); - } - if (klass != null) { - // We managed to load the class - typeDescription = TypeDescription.ForLoadedType.of(klass); - log.debug( - "Direct loadClass type resolution of {} from class loader {} bypass transformation", - className, - classLoader); - } - isResolved = true; - } - if (typeDescription == null) { - throw new IllegalStateException("Cannot resolve type description for " + className); - } - return typeDescription; - } - } - - private static class CachingResolution implements TypePool.Resolution { - private final TypePool.Resolution delegate; - private TypeDescription cachedResolution; - - public CachingResolution(final TypePool.Resolution delegate) { - this.delegate = delegate; - } - - @Override - public boolean isResolved() { - return delegate.isResolved(); - } - - @Override - public TypeDescription resolve() { - // Intentionally not "thread safe". Duplicate work deemed an acceptable trade-off. - if (cachedResolution == null) { - cachedResolution = new CachingTypeDescription(delegate.resolve()); - } - return cachedResolution; - } - } - - /** - * TypeDescription implementation that delegates and caches the results for the expensive calls - * commonly used by our instrumentation. - */ - private static class CachingTypeDescription - extends TypeDescription.AbstractBase.OfSimpleType.WithDelegation { - private final TypeDescription delegate; - - // These fields are intentionally not "thread safe". - // Duplicate work deemed an acceptable trade-off. 
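Several of the Caching* classes in the removed DDCachingPoolStrategy rely on the same "benign race" memoization idiom this comment describes. Pulled out of the diff, a minimal sketch of the idiom (illustrative names, assuming ByteBuddy on the classpath; not the agent's actual code):

import net.bytebuddy.description.type.TypeDescription;

// Benign-race memoization: no volatile, no locking. Two threads may both observe null and
// both compute the value; the duplicate work is deemed an acceptable trade-off compared to
// synchronizing every read, mirroring the comment above.
final class LazySuperClass {
  private final TypeDescription delegate;
  private TypeDescription.Generic superClass; // deliberately a plain field

  LazySuperClass(TypeDescription delegate) {
    this.delegate = delegate;
  }

  TypeDescription.Generic get() {
    if (superClass == null) { // racy null check is intentional
      superClass = delegate.getSuperClass();
    }
    return superClass;
  }
}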
- private Generic superClass; - private TypeList.Generic interfaces; - private AnnotationList annotations; - private MethodList methods; - - public CachingTypeDescription(final TypeDescription delegate) { - this.delegate = delegate; - } - - @Override - protected TypeDescription delegate() { - return delegate; - } - - @Override - public Generic getSuperClass() { - if (superClass == null) { - superClass = delegate.getSuperClass(); - } - return superClass; - } - - @Override - public TypeList.Generic getInterfaces() { - if (interfaces == null) { - interfaces = delegate.getInterfaces(); - } - return interfaces; - } - - @Override - public AnnotationList getDeclaredAnnotations() { - if (annotations == null) { - annotations = delegate.getDeclaredAnnotations(); - } - return annotations; - } - - @Override - public MethodList getDeclaredMethods() { - if (methods == null) { - methods = delegate.getDeclaredMethods(); - } - return methods; - } - - @Override - public String getName() { - return delegate.getName(); - } - } -} diff --git a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDOutlinePoolStrategy.java b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDOutlinePoolStrategy.java index 5317a97a359..c8ddb34f915 100644 --- a/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDOutlinePoolStrategy.java +++ b/dd-java-agent/agent-builder/src/main/java/datadog/trace/agent/tooling/bytebuddy/DDOutlinePoolStrategy.java @@ -18,10 +18,6 @@ public final class DDOutlinePoolStrategy implements AgentBuilder.PoolStrategy { public static final AgentBuilder.PoolStrategy INSTANCE = new DDOutlinePoolStrategy(); - public static void registerTypePoolFacade() { - TypePoolFacade.registerAsSupplier(); - } - @Override public TypePool typePool(ClassFileLocator ignored, ClassLoader classLoader) { // it's safe to ignore this ClassFileLocator because we capture the target bytecode diff --git a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/test/DefaultInstrumenterForkedTest.groovy b/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/test/DefaultInstrumenterForkedTest.groovy index f79ead320e3..555abcb5860 100644 --- a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/test/DefaultInstrumenterForkedTest.groovy +++ b/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/test/DefaultInstrumenterForkedTest.groovy @@ -1,8 +1,8 @@ package datadog.trace.agent.test import datadog.trace.agent.tooling.Instrumenter -import datadog.trace.agent.tooling.bytebuddy.DDCachingPoolStrategy import datadog.trace.agent.tooling.bytebuddy.matcher.DDElementMatchers +import datadog.trace.agent.tooling.bytebuddy.outline.TypePoolFacade import datadog.trace.test.util.DDSpecification import spock.lang.Shared @@ -11,7 +11,7 @@ import java.lang.instrument.Instrumentation class DefaultInstrumenterForkedTest extends DDSpecification { static { - DDCachingPoolStrategy.registerAsSupplier() + TypePoolFacade.registerAsSupplier() DDElementMatchers.registerAsSupplier() } diff --git a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/CacheProviderTest.groovy b/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/CacheProviderTest.groovy deleted file mode 100644 index 7f4d81883fb..00000000000 --- a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/CacheProviderTest.groovy +++ /dev/null @@ -1,222 +0,0 @@ -package 
datadog.trace.agent.tooling.bytebuddy - -import datadog.trace.test.util.DDSpecification -import net.bytebuddy.description.type.TypeDescription -import net.bytebuddy.dynamic.ClassFileLocator -import net.bytebuddy.pool.TypePool -import spock.lang.Timeout - -import java.lang.ref.WeakReference - -@Timeout(5) -class CacheProviderTest extends DDSpecification { - def "key bootstrap equivalence"() { - // def loader = null - def loaderHash = DDCachingPoolStrategy.BOOTSTRAP_HASH - def loaderRef = null - - def key1 = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef, "foo") - def key2 = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef, "foo") - - expect: - key1.hashCode() == key2.hashCode() - key1.equals(key2) - } - - def "key same ref equivalence"() { - setup: - def loader = newClassLoader() - def loaderHash = loader.hashCode() - def loaderRef = new WeakReference(loader) - - def key1 = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef, "foo") - def key2 = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef, "foo") - - expect: - key1.hashCode() == key2.hashCode() - key1.equals(key2) - } - - def "key different ref equivalence"() { - setup: - def loader = newClassLoader() - def loaderHash = loader.hashCode() - def loaderRef1 = new WeakReference(loader) - def loaderRef2 = new WeakReference(loader) - - def key1 = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef1, "foo") - def key2 = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef2, "foo") - - expect: - loaderRef1 != loaderRef2 - - key1.hashCode() == key2.hashCode() - key1.equals(key2) - } - - def "key mismatch -- same loader - diff name"() { - setup: - def loader = newClassLoader() - def loaderHash = loader.hashCode() - def loaderRef = new WeakReference(loader) - def fooKey = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef, "foo") - def barKey = new DDCachingPoolStrategy.TypeCacheKey(loaderHash, loaderRef, "bar") - - expect: - // not strictly guaranteed -- but important for performance - fooKey.hashCode() != barKey.hashCode() - !fooKey.equals(barKey) - } - - def "key mismatch -- same name - diff loader"() { - setup: - def loader1 = newClassLoader() - def loader1Hash = loader1.hashCode() - def loaderRef1 = new WeakReference(loader1) - - def loader2 = newClassLoader() - def loader2Hash = loader2.hashCode() - def loaderRef2 = new WeakReference(loader2) - - def fooKey1 = new DDCachingPoolStrategy.TypeCacheKey(loader1Hash, loaderRef1, "foo") - def fooKey2 = new DDCachingPoolStrategy.TypeCacheKey(loader2Hash, loaderRef2, "foo") - - expect: - // not strictly guaranteed -- but important for performance - fooKey1.hashCode() != fooKey2.hashCode() - !fooKey1.equals(fooKey2) - } - - def "test basic caching"() { - setup: - def poolStrat = new DDCachingPoolStrategy() - - def loader = newClassLoader() - def loaderHash = loader.hashCode() - def loaderRef = new WeakReference(loader) - - def cacheProvider = poolStrat.createCacheProvider(loaderHash, loaderRef) - - when: - cacheProvider.register("foo", new TypePool.Resolution.Simple(TypeDescription.ForLoadedType.of(void.class))) - - then: - // not strictly guaranteed, but fine for this test - cacheProvider.find("foo") != null - poolStrat.approximateSize() == 1 - } - - def "test loader equivalence"() { - setup: - def poolStrat = new DDCachingPoolStrategy() - - def loader1 = newClassLoader() - def loaderHash1 = loader1.hashCode() - def loaderRef1A = new WeakReference(loader1) - def loaderRef1B = new WeakReference(loader1) - - def cacheProvider1A 
= poolStrat.createCacheProvider(loaderHash1, loaderRef1A) - def cacheProvider1B = poolStrat.createCacheProvider(loaderHash1, loaderRef1B) - - when: - cacheProvider1A.register("foo", newVoid()) - - then: - // not strictly guaranteed, but fine for this test - cacheProvider1A.find("foo") != null - cacheProvider1B.find("foo") != null - - cacheProvider1A.find("foo").is(cacheProvider1B.find("foo")) - poolStrat.approximateSize() == 1 - } - - def "test loader separation"() { - setup: - def poolStrat = new DDCachingPoolStrategy() - - def loader1 = newClassLoader() - def loaderHash1 = loader1.hashCode() - def loaderRef1 = new WeakReference(loader1) - - def loader2 = newClassLoader() - def loaderHash2 = loader2.hashCode() - def loaderRef2 = new WeakReference(loader2) - - def cacheProvider1 = poolStrat.createCacheProvider(loaderHash1, loaderRef1) - def cacheProvider2 = poolStrat.createCacheProvider(loaderHash2, loaderRef2) - - when: - cacheProvider1.register("foo", newVoid()) - cacheProvider2.register("foo", newVoid()) - - then: - // not strictly guaranteed, but fine for this test - cacheProvider1.find("foo") != null - cacheProvider2.find("foo") != null - - !cacheProvider1.find("foo").is(cacheProvider2.find("foo")) - poolStrat.approximateSize() == 2 - } - - def "test capacity"() { - setup: - def poolStrat = new DDCachingPoolStrategy() - def capacity = DDCachingPoolStrategy.TYPE_CAPACITY - - def loader1 = newClassLoader() - def loaderHash1 = loader1.hashCode() - def loaderRef1 = new WeakReference(loader1) - - def loader2 = newClassLoader() - def loaderHash2 = loader2.hashCode() - def loaderRef2 = new WeakReference(loader2) - - def cacheProvider1 = poolStrat.createCacheProvider(loaderHash1, loaderRef1) - def cacheProvider2 = poolStrat.createCacheProvider(loaderHash2, loaderRef2) - - def id = 0 - - when: - (capacity / 2).times { - id += 1 - cacheProvider1.register("foo${id}", newVoid()) - cacheProvider2.register("foo${id}", newVoid()) - } - - then: - // cache will start to proactively free slots & size calc is approximate - poolStrat.approximateSize() >= 0.75 * capacity - - when: - 10.times { - id += 1 - cacheProvider1.register("foo${id}", newVoid()) - cacheProvider2.register("foo${id}", newVoid()) - } - - then: - // cache will start to proactively free slots & size calc is approximate - poolStrat.approximateSize() > 0.8 * capacity - } - - static newVoid() { - return new TypePool.Resolution.Simple(TypeDescription.ForLoadedType.of(void.class)) - } - - static newClassLoader() { - return new URLClassLoader([] as URL[], (ClassLoader) null) - } - - static newLocator() { - return new ClassFileLocator() { - @Override - ClassFileLocator.Resolution locate(String name) throws IOException { - return null - } - - @Override - void close() throws IOException { - } - } - } -} diff --git a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/DDCachingPoolStrategyTest.groovy b/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/DDCachingPoolStrategyTest.groovy deleted file mode 100644 index 814ac481453..00000000000 --- a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/DDCachingPoolStrategyTest.groovy +++ /dev/null @@ -1,27 +0,0 @@ -package datadog.trace.agent.tooling.bytebuddy - -import datadog.trace.agent.tooling.bytebuddy.matcher.DDElementMatchers -import datadog.trace.test.util.DDSpecification - -import java.util.concurrent.ForkJoinTask -import java.util.concurrent.Future - -import static 
datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers.implementsInterface -import static net.bytebuddy.matcher.ElementMatchers.named - -class DDCachingPoolStrategyTest extends DDSpecification { - static { - DDCachingPoolStrategy.registerAsSupplier() - DDElementMatchers.registerAsSupplier() - } - - def "bootstrap classes can be loaded by our caching type pool"() { - setup: - def pool = SharedTypePools.typePool(null) - def description = pool.describe(ForkJoinTask.name).resolve() - - expect: - description.name == ForkJoinTask.name - implementsInterface(named(Future.name)).matches(description) - } -} diff --git a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/matcher/AbstractHierarchyMatcherTest.groovy b/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/matcher/AbstractHierarchyMatcherTest.groovy index 0f17ee41507..0dcdabfc7a8 100644 --- a/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/matcher/AbstractHierarchyMatcherTest.groovy +++ b/dd-java-agent/agent-builder/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/matcher/AbstractHierarchyMatcherTest.groovy @@ -1,13 +1,13 @@ package datadog.trace.agent.tooling.bytebuddy.matcher -import datadog.trace.agent.tooling.bytebuddy.DDCachingPoolStrategy import datadog.trace.agent.tooling.bytebuddy.SharedTypePools +import datadog.trace.agent.tooling.bytebuddy.outline.TypePoolFacade import datadog.trace.test.util.DDSpecification import spock.lang.Shared abstract class AbstractHierarchyMatcherTest extends DDSpecification { static { - DDCachingPoolStrategy.registerAsSupplier() + TypePoolFacade.registerAsSupplier() DDElementMatchers.registerAsSupplier() } diff --git a/dd-java-agent/agent-ci-visibility/build.gradle b/dd-java-agent/agent-ci-visibility/build.gradle index 8b68cc7cc48..9917f17cbdb 100644 --- a/dd-java-agent/agent-ci-visibility/build.gradle +++ b/dd-java-agent/agent-ci-visibility/build.gradle @@ -12,12 +12,15 @@ minimumInstructionCoverage = 0.8 excludedClassesCoverage += [ "datadog.trace.civisibility.CiVisibilitySystem", "datadog.trace.civisibility.CiVisibilitySystem.1", - "datadog.trace.civisibility.DDTestModuleChild", + "datadog.trace.civisibility.DDBuildSystemModuleImpl", + "datadog.trace.civisibility.DDBuildSystemSessionImpl", + "datadog.trace.civisibility.DDTestFrameworkModuleImpl", + "datadog.trace.civisibility.DDTestFrameworkModuleProxy", + "datadog.trace.civisibility.DDTestFrameworkModuleProxy.TestFrameworkData", + "datadog.trace.civisibility.DDTestFrameworkSessionImpl", + "datadog.trace.civisibility.DDTestFrameworkSessionProxy", "datadog.trace.civisibility.DDTestModuleImpl", - "datadog.trace.civisibility.DDTestModuleParent", - "datadog.trace.civisibility.DDTestSessionChild", "datadog.trace.civisibility.DDTestSessionImpl", - "datadog.trace.civisibility.DDTestSessionParent", "datadog.trace.civisibility.DDTestSuiteImpl", "datadog.trace.civisibility.DDTestImpl", "datadog.trace.civisibility.TestModuleRegistry", @@ -27,6 +30,7 @@ excludedClassesCoverage += [ "datadog.trace.civisibility.config.JvmInfoFactory", "datadog.trace.civisibility.config.ConfigurationApi", "datadog.trace.civisibility.config.ModuleExecutionSettingsFactory", + "datadog.trace.civisibility.config.JvmInfo", "datadog.trace.civisibility.config.JvmInfoFactory.JvmVersionOutputParser", "datadog.trace.civisibility.config.CachingModuleExecutionSettingsFactory", "datadog.trace.civisibility.config.CachingModuleExecutionSettingsFactory.Key", @@ -35,11 +39,8 @@ 
excludedClassesCoverage += [ "datadog.trace.civisibility.config.ConfigurationApi.1", "datadog.trace.civisibility.config.ModuleExecutionSettingsFactoryImpl", "datadog.trace.civisibility.config.SkippableTestsSerializer", - "datadog.trace.civisibility.context.AbstractTestContext", - "datadog.trace.civisibility.context.EmptyTestContext", - "datadog.trace.civisibility.context.ParentProcessTestContext", - "datadog.trace.civisibility.context.SpanTestContext", - "datadog.trace.civisibility.coverage.TestProbes.TestProbesFactory", + "datadog.trace.civisibility.coverage.CoverageUtils", + "datadog.trace.civisibility.coverage.CoverageUtils.RepoIndexFileLocator", "datadog.trace.civisibility.coverage.ExecutionDataAdapter", "datadog.trace.civisibility.coverage.NoopCoverageProbeStore", "datadog.trace.civisibility.coverage.NoopCoverageProbeStore.NoopCoverageProbeStoreFactory", @@ -47,11 +48,14 @@ excludedClassesCoverage += [ "datadog.trace.civisibility.coverage.SegmentlessTestProbes.SegmentlessTestProbesFactory", "datadog.trace.civisibility.coverage.SourceAnalyzer", "datadog.trace.civisibility.coverage.TestProbes", + "datadog.trace.civisibility.coverage.TestProbes.TestProbesFactory", "datadog.trace.civisibility.events.BuildEventsHandlerImpl", "datadog.trace.civisibility.events.TestEventsHandlerImpl", "datadog.trace.civisibility.events.TestDescriptor", "datadog.trace.civisibility.events.TestModuleDescriptor", "datadog.trace.civisibility.events.TestSuiteDescriptor", + "datadog.trace.civisibility.git.CILocalGitInfoBuilder", + "datadog.trace.civisibility.git.GitClientGitInfoBuilder", "datadog.trace.civisibility.git.GitObject", "datadog.trace.civisibility.git.tree.*", "datadog.trace.civisibility.ipc.ModuleExecutionResult", @@ -60,13 +64,18 @@ excludedClassesCoverage += [ "datadog.trace.civisibility.ipc.SignalServer", "datadog.trace.civisibility.ipc.SignalType", "datadog.trace.civisibility.ipc.SignalClient", + "datadog.trace.civisibility.ipc.SignalClient.Factory", "datadog.trace.civisibility.ipc.SkippableTestsResponse", + "datadog.trace.civisibility.ipc.TestFramework", "datadog.trace.civisibility.source.MethodLinesResolver.MethodLines", + "datadog.trace.civisibility.source.index.PackageTree.Node", + "datadog.trace.civisibility.source.index.RepoIndex", "datadog.trace.civisibility.source.index.RepoIndexBuilder.RepoIndexingFileVisitor", "datadog.trace.civisibility.source.index.RepoIndexFetcher", "datadog.trace.civisibility.source.index.RepoIndexSourcePathResolver", "datadog.trace.civisibility.utils.ShellCommandExecutor", "datadog.trace.civisibility.utils.ShellCommandExecutor.OutputParser", + "datadog.trace.civisibility.utils.SpanUtils" ] dependencies { @@ -75,6 +84,7 @@ dependencies { implementation deps.asm implementation deps.asmcommons implementation group: 'org.jacoco', name: 'org.jacoco.core', version: '0.8.9' + implementation group: 'org.jacoco', name: 'org.jacoco.report', version: '0.8.9' implementation project(':communication') implementation project(':internal-api') diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/CiVisibilitySystem.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/CiVisibilitySystem.java index 3a6d75d876e..7f0c82c9304 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/CiVisibilitySystem.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/CiVisibilitySystem.java @@ -4,10 +4,9 @@ import datadog.trace.api.Config; import 
datadog.trace.api.civisibility.CIVisibility; import datadog.trace.api.civisibility.InstrumentationBridge; -import datadog.trace.api.civisibility.coverage.CoverageProbeStore; +import datadog.trace.api.civisibility.coverage.CoverageDataSupplier; import datadog.trace.api.civisibility.events.BuildEventsHandler; import datadog.trace.api.civisibility.events.TestEventsHandler; -import datadog.trace.api.civisibility.source.SourcePathResolver; import datadog.trace.api.config.CiVisibilityConfig; import datadog.trace.api.git.GitInfoProvider; import datadog.trace.civisibility.ci.CIInfo; @@ -24,6 +23,7 @@ import datadog.trace.civisibility.config.JvmInfoFactory; import datadog.trace.civisibility.config.ModuleExecutionSettingsFactory; import datadog.trace.civisibility.config.ModuleExecutionSettingsFactoryImpl; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; import datadog.trace.civisibility.coverage.NoopCoverageProbeStore; import datadog.trace.civisibility.coverage.SegmentlessTestProbes; import datadog.trace.civisibility.coverage.TestProbes; @@ -33,29 +33,31 @@ import datadog.trace.civisibility.events.TestEventsHandlerImpl; import datadog.trace.civisibility.git.CILocalGitInfoBuilder; import datadog.trace.civisibility.git.CIProviderGitInfoBuilder; +import datadog.trace.civisibility.git.GitClientGitInfoBuilder; import datadog.trace.civisibility.git.tree.GitClient; import datadog.trace.civisibility.git.tree.GitDataApi; import datadog.trace.civisibility.git.tree.GitDataUploader; import datadog.trace.civisibility.git.tree.GitDataUploaderImpl; import datadog.trace.civisibility.ipc.SignalClient; import datadog.trace.civisibility.ipc.SignalServer; -import datadog.trace.civisibility.source.BestEfforSourcePathResolver; +import datadog.trace.civisibility.source.BestEffortMethodLinesResolver; +import datadog.trace.civisibility.source.BestEffortSourcePathResolver; +import datadog.trace.civisibility.source.ByteCodeMethodLinesResolver; +import datadog.trace.civisibility.source.CompilerAidedMethodLinesResolver; import datadog.trace.civisibility.source.CompilerAidedSourcePathResolver; import datadog.trace.civisibility.source.MethodLinesResolver; -import datadog.trace.civisibility.source.MethodLinesResolverImpl; +import datadog.trace.civisibility.source.SourcePathResolver; import datadog.trace.civisibility.source.index.RepoIndexBuilder; import datadog.trace.civisibility.source.index.RepoIndexFetcher; import datadog.trace.civisibility.source.index.RepoIndexProvider; import datadog.trace.civisibility.source.index.RepoIndexSourcePathResolver; import datadog.trace.util.Strings; -import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.function.Supplier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,20 +74,31 @@ public static void start(SharedCommunicationObjects sco) { return; } - GitInfoProvider.INSTANCE.registerGitInfoBuilder(new CIProviderGitInfoBuilder()); - GitInfoProvider.INSTANCE.registerGitInfoBuilder(new CILocalGitInfoBuilder(GIT_FOLDER_NAME)); + GitClient.Factory gitClientFactory = buildGitClientFactory(config); + CoverageProbeStoreFactory coverageProbeStoreFactory = buildTestProbesFactory(config); - DDTestSessionImpl.SessionImplFactory sessionFactory = sessionFactory(config, sco); - CIVisibility.registerSessionFactory(sessionFactory); + GitInfoProvider gitInfoProvider = GitInfoProvider.INSTANCE; 
+ gitInfoProvider.registerGitInfoBuilder(new CIProviderGitInfoBuilder()); + gitInfoProvider.registerGitInfoBuilder( + new CILocalGitInfoBuilder(gitClientFactory, GIT_FOLDER_NAME)); + gitInfoProvider.registerGitInfoBuilder(new GitClientGitInfoBuilder(config, gitClientFactory)); - InstrumentationBridge.registerTestEventsHandlerFactory( - testEventsHandlerFactory(config, sessionFactory)); InstrumentationBridge.registerBuildEventsHandlerFactory( - buildEventsHandlerFactory(sessionFactory)); - InstrumentationBridge.registerCoverageProbeStoreFactory(buildTestProbesFactory(config)); + buildEventsHandlerFactory( + config, sco, gitInfoProvider, coverageProbeStoreFactory, gitClientFactory)); + InstrumentationBridge.registerTestEventsHandlerFactory( + testEventsHandlerFactory( + config, sco, gitInfoProvider, coverageProbeStoreFactory, gitClientFactory)); + InstrumentationBridge.registerCoverageProbeStoreRegistry(coverageProbeStoreFactory); + + CIVisibility.registerSessionFactory(apiSessionFactory(config, coverageProbeStoreFactory)); + } + + private static GitClient.Factory buildGitClientFactory(Config config) { + return new GitClient.Factory(config); } - private static CoverageProbeStore.Factory buildTestProbesFactory(Config config) { + private static CoverageProbeStoreFactory buildTestProbesFactory(Config config) { if (!config.isCiVisibilityCodeCoverageEnabled()) { return new NoopCoverageProbeStore.NoopCoverageProbeStoreFactory(); } @@ -95,33 +108,140 @@ private static CoverageProbeStore.Factory buildTestProbesFactory(Config config) return new TestProbes.TestProbesFactory(); } - private static DDTestSessionImpl.SessionImplFactory sessionFactory( - Config config, SharedCommunicationObjects sco) { + private static BuildEventsHandler.Factory buildEventsHandlerFactory( + Config config, + SharedCommunicationObjects sco, + GitInfoProvider gitInfoProvider, + CoverageProbeStoreFactory coverageProbeStoreFactory, + GitClient.Factory gitClientFactory) { + DDBuildSystemSession.Factory sessionFactory = + buildSystemSessionFactory( + config, sco, gitInfoProvider, coverageProbeStoreFactory, gitClientFactory); + return new BuildEventsHandler.Factory() { + @Override + public BuildEventsHandler create() { + return new BuildEventsHandlerImpl<>(sessionFactory, new JvmInfoFactory()); + } + }; + } + + private static DDBuildSystemSession.Factory buildSystemSessionFactory( + Config config, + SharedCommunicationObjects sco, + GitInfoProvider gitInfoProvider, + CoverageProbeStoreFactory coverageProbeStoreFactory, + GitClient.Factory gitClientFactory) { BackendApiFactory backendApiFactory = new BackendApiFactory(config, sco); BackendApi backendApi = backendApiFactory.createBackendApi(); - return (String projectName, Path projectRoot, String component, Long startTime) -> { + return (String projectName, + Path projectRoot, + String startCommand, + String buildSystemName, + Long startTime) -> { + // Session needs to see the most recent commit in a repo. 
+ // Cache shouldn't be a problem normally, + // but it can get stale if we're inside a long-running Gradle daemon + // and repo that we're using gets updated + gitInfoProvider.invalidateCache(); + CIProviderInfoFactory ciProviderInfoFactory = new CIProviderInfoFactory(config); CIProviderInfo ciProviderInfo = ciProviderInfoFactory.createCIProviderInfo(projectRoot); CIInfo ciInfo = ciProviderInfo.buildCIInfo(); String repoRoot = ciInfo.getCiWorkspace(); - RepoIndexProvider indexProvider = getRepoIndexProvider(config, repoRoot); + RepoIndexProvider indexProvider = + new RepoIndexBuilder(projectRoot.toString(), FileSystems.getDefault()); SourcePathResolver sourcePathResolver = getSourcePathResolver(repoRoot, indexProvider); Codeowners codeowners = getCodeowners(repoRoot); - MethodLinesResolver methodLinesResolver = new MethodLinesResolverImpl(); + + MethodLinesResolver methodLinesResolver = + new BestEffortMethodLinesResolver( + new CompilerAidedMethodLinesResolver(), new ByteCodeMethodLinesResolver()); + Map ciTags = new CITagsProvider().getCiTags(ciInfo); - TestDecorator testDecorator = new TestDecoratorImpl(component, ciTags); + TestDecorator testDecorator = new TestDecoratorImpl(buildSystemName, ciTags); TestModuleRegistry testModuleRegistry = new TestModuleRegistry(); - GitDataUploader gitDataUploader = buildGitDataUploader(config, backendApi, repoRoot); + GitDataUploader gitDataUploader = + buildGitDataUploader(config, gitInfoProvider, gitClientFactory, backendApi, repoRoot); ModuleExecutionSettingsFactory moduleExecutionSettingsFactory = - buildModuleExecutionSettingsFactory(config, backendApi, gitDataUploader, repoRoot); + buildModuleExecutionSettingsFactory( + config, backendApi, gitDataUploader, indexProvider, repoRoot); String signalServerHost = config.getCiVisibilitySignalServerHost(); int signalServerPort = config.getCiVisibilitySignalServerPort(); SignalServer signalServer = new SignalServer(signalServerHost, signalServerPort); + // only start Git data upload in parent process + gitDataUploader.startOrObserveGitDataUpload(); + + RepoIndexBuilder indexBuilder = new RepoIndexBuilder(repoRoot, FileSystems.getDefault()); + return new DDBuildSystemSessionImpl( + projectName, + repoRoot, + startCommand, + startTime, + config, + testModuleRegistry, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + moduleExecutionSettingsFactory, + coverageProbeStoreFactory, + signalServer, + indexBuilder); + }; + } + + private static TestEventsHandler.Factory testEventsHandlerFactory( + Config config, + SharedCommunicationObjects sco, + GitInfoProvider gitInfoProvider, + CoverageProbeStoreFactory coverageProbeStoreFactory, + GitClient.Factory gitClientFactory) { + DDTestFrameworkSession.Factory sessionFactory = + testFrameworkSessionFactory( + config, sco, gitInfoProvider, coverageProbeStoreFactory, gitClientFactory); + return (String component, Path path) -> { + CIProviderInfoFactory ciProviderInfoFactory = new CIProviderInfoFactory(config); + CIProviderInfo ciProviderInfo = ciProviderInfoFactory.createCIProviderInfo(path); + CIInfo ciInfo = ciProviderInfo.buildCIInfo(); + String repoRoot = ciInfo.getCiWorkspace(); + String moduleName = + (repoRoot != null) ? 
Paths.get(repoRoot).relativize(path).toString() : path.toString(); + + DDTestFrameworkSession testSession = + sessionFactory.startSession(moduleName, path, component, null); + DDTestFrameworkModule testModule = testSession.testModuleStart(moduleName, null); + return new TestEventsHandlerImpl(testSession, testModule); + }; + } + + private static DDTestFrameworkSession.Factory testFrameworkSessionFactory( + Config config, + SharedCommunicationObjects sco, + GitInfoProvider gitInfoProvider, + CoverageProbeStoreFactory coverageProbeStoreFactory, + GitClient.Factory gitClientFactory) { + BackendApiFactory backendApiFactory = new BackendApiFactory(config, sco); + BackendApi backendApi = backendApiFactory.createBackendApi(); + + return (String projectName, Path projectRoot, String component, Long startTime) -> { + CIProviderInfoFactory ciProviderInfoFactory = new CIProviderInfoFactory(config); + CIProviderInfo ciProviderInfo = ciProviderInfoFactory.createCIProviderInfo(projectRoot); + CIInfo ciInfo = ciProviderInfo.buildCIInfo(); + String repoRoot = ciInfo.getCiWorkspace(); + + Codeowners codeowners = getCodeowners(repoRoot); + MethodLinesResolver methodLinesResolver = + new BestEffortMethodLinesResolver( + new CompilerAidedMethodLinesResolver(), new ByteCodeMethodLinesResolver()); + + Map ciTags = new CITagsProvider().getCiTags(ciInfo); + TestDecorator testDecorator = new TestDecoratorImpl(component, ciTags); + // fallbacks to System.getProperty below are needed for cases when // system variables are set after config was initialized Long parentProcessSessionId = config.getCiVisibilitySessionId(); @@ -153,38 +273,35 @@ private static DDTestSessionImpl.SessionImplFactory sessionFactory( // either we are in the build system // or we are in the tests JVM and the build system is not instrumented if (parentProcessSessionId == null || parentProcessModuleId == null) { + GitDataUploader gitDataUploader = + buildGitDataUploader(config, gitInfoProvider, gitClientFactory, backendApi, repoRoot); + RepoIndexProvider indexProvider = new RepoIndexBuilder(repoRoot, FileSystems.getDefault()); + ModuleExecutionSettingsFactory moduleExecutionSettingsFactory = + buildModuleExecutionSettingsFactory( + config, backendApi, gitDataUploader, indexProvider, repoRoot); + SourcePathResolver sourcePathResolver = getSourcePathResolver(repoRoot, indexProvider); + // only start Git data upload in parent process gitDataUploader.startOrObserveGitDataUpload(); - RepoIndexBuilder indexBuilder = new RepoIndexBuilder(repoRoot, FileSystems.getDefault()); - return new DDTestSessionParent( + return new DDTestFrameworkSessionImpl( projectName, startTime, config, - testModuleRegistry, testDecorator, sourcePathResolver, codeowners, methodLinesResolver, - moduleExecutionSettingsFactory, - signalServer, - indexBuilder); - } - - InetSocketAddress signalServerAddress = null; - String host = - System.getProperty( - Strings.propertyNameToSystemPropertyName( - CiVisibilityConfig.CIVISIBILITY_SIGNAL_SERVER_HOST)); - String port = - System.getProperty( - Strings.propertyNameToSystemPropertyName( - CiVisibilityConfig.CIVISIBILITY_SIGNAL_SERVER_PORT)); - if (host != null && port != null) { - signalServerAddress = new InetSocketAddress(host, Integer.parseInt(port)); + coverageProbeStoreFactory, + moduleExecutionSettingsFactory); } - return new DDTestSessionChild( + InetSocketAddress signalServerAddress = getSignalServerAddress(); + SignalClient.Factory signalClientFactory = new SignalClient.Factory(signalServerAddress); + RepoIndexProvider 
indexProvider = new RepoIndexFetcher(signalClientFactory); + SourcePathResolver sourcePathResolver = getSourcePathResolver(repoRoot, indexProvider); + CoverageDataSupplier coverageDataSupplier = InstrumentationBridge::getCoverageData; + return new DDTestFrameworkSessionProxy( parentProcessSessionId, parentProcessModuleId, config, @@ -192,12 +309,64 @@ private static DDTestSessionImpl.SessionImplFactory sessionFactory( sourcePathResolver, codeowners, methodLinesResolver, + coverageProbeStoreFactory, + coverageDataSupplier, signalServerAddress); }; } + private static InetSocketAddress getSignalServerAddress() { + String host = + System.getProperty( + Strings.propertyNameToSystemPropertyName( + CiVisibilityConfig.CIVISIBILITY_SIGNAL_SERVER_HOST)); + String port = + System.getProperty( + Strings.propertyNameToSystemPropertyName( + CiVisibilityConfig.CIVISIBILITY_SIGNAL_SERVER_PORT)); + if (host != null && port != null) { + return new InetSocketAddress(host, Integer.parseInt(port)); + } else { + return null; + } + } + + private static CIVisibility.SessionFactory apiSessionFactory( + Config config, CoverageProbeStoreFactory coverageProbeStoreFactory) { + return (String projectName, Path projectRoot, String component, Long startTime) -> { + CIProviderInfoFactory ciProviderInfoFactory = new CIProviderInfoFactory(config); + CIProviderInfo ciProviderInfo = ciProviderInfoFactory.createCIProviderInfo(projectRoot); + CIInfo ciInfo = ciProviderInfo.buildCIInfo(); + String repoRoot = ciInfo.getCiWorkspace(); + + Codeowners codeowners = getCodeowners(repoRoot); + MethodLinesResolver methodLinesResolver = + new BestEffortMethodLinesResolver( + new CompilerAidedMethodLinesResolver(), new ByteCodeMethodLinesResolver()); + + Map ciTags = new CITagsProvider().getCiTags(ciInfo); + TestDecorator testDecorator = new TestDecoratorImpl(component, ciTags); + RepoIndexProvider indexProvider = new RepoIndexBuilder(repoRoot, FileSystems.getDefault()); + SourcePathResolver sourcePathResolver = getSourcePathResolver(repoRoot, indexProvider); + + return new DDTestSessionImpl( + projectName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory); + }; + } + private static GitDataUploader buildGitDataUploader( - Config config, BackendApi backendApi, String repoRoot) { + Config config, + GitInfoProvider gitInfoProvider, + GitClient.Factory gitClientFactory, + BackendApi backendApi, + String repoRoot) { if (!config.isCiVisibilityGitUploadEnabled()) { return () -> CompletableFuture.completedFuture(null); } @@ -214,18 +383,18 @@ private static GitDataUploader buildGitDataUploader( return () -> CompletableFuture.completedFuture(null); } - long commandTimeoutMillis = config.getCiVisibilityGitCommandTimeoutMillis(); String remoteName = config.getCiVisibilityGitRemoteName(); - GitDataApi gitDataApi = new GitDataApi(backendApi); - GitClient gitClient = new GitClient(repoRoot, "1 month ago", 1000, commandTimeoutMillis); - return new GitDataUploaderImpl(config, gitDataApi, gitClient, remoteName); + GitClient gitClient = gitClientFactory.create(repoRoot); + return new GitDataUploaderImpl( + config, gitDataApi, gitClient, gitInfoProvider, repoRoot, remoteName); } private static ModuleExecutionSettingsFactory buildModuleExecutionSettingsFactory( Config config, BackendApi backendApi, GitDataUploader gitDataUploader, + RepoIndexProvider repoIndexProvider, String repositoryRoot) { ConfigurationApi configurationApi; if (backendApi == null) { @@ -238,27 +407,7 @@ private 
static ModuleExecutionSettingsFactory buildModuleExecutionSettingsFactor return new CachingModuleExecutionSettingsFactory( config, new ModuleExecutionSettingsFactoryImpl( - config, configurationApi, gitDataUploader, repositoryRoot)); - } - - private static RepoIndexProvider getRepoIndexProvider(Config config, String repoRoot) { - String host = config.getCiVisibilitySignalServerHost(); - int port = config.getCiVisibilitySignalServerPort(); - if (host != null && port > 0 && config.isCiVisibilityRepoIndexSharingEnabled()) { - InetSocketAddress serverAddress = new InetSocketAddress(host, port); - Supplier signalClientFactory = - () -> { - try { - return new SignalClient(serverAddress); - } catch (IOException e) { - throw new RuntimeException( - "Could not instantiate signal client. " + "Host: " + host + ", port: " + port, e); - } - }; - return new RepoIndexFetcher(signalClientFactory); - } else { - return new RepoIndexBuilder(repoRoot, FileSystems.getDefault()); - } + config, configurationApi, gitDataUploader, repoIndexProvider, repositoryRoot)); } private static SourcePathResolver getSourcePathResolver( @@ -266,7 +415,7 @@ private static SourcePathResolver getSourcePathResolver( if (repoRoot != null) { RepoIndexSourcePathResolver indexSourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, indexProvider); - return new BestEfforSourcePathResolver( + return new BestEffortSourcePathResolver( new CompilerAidedSourcePathResolver(repoRoot), indexSourcePathResolver); } else { return clazz -> null; @@ -280,31 +429,4 @@ private static Codeowners getCodeowners(String repoRoot) { return path -> null; } } - - private static TestEventsHandler.Factory testEventsHandlerFactory( - Config config, DDTestSessionImpl.SessionImplFactory sessionFactory) { - return (String component, Path path) -> { - CIProviderInfoFactory ciProviderInfoFactory = new CIProviderInfoFactory(config); - CIProviderInfo ciProviderInfo = ciProviderInfoFactory.createCIProviderInfo(path); - CIInfo ciInfo = ciProviderInfo.buildCIInfo(); - String repoRoot = ciInfo.getCiWorkspace(); - String moduleName = - (repoRoot != null) ? Paths.get(repoRoot).relativize(path).toString() : path.toString(); - - DDTestSessionImpl testSession = - sessionFactory.startSession(moduleName, path, component, null); - DDTestModuleImpl testModule = testSession.testModuleStart(moduleName, null); - return new TestEventsHandlerImpl(testSession, testModule); - }; - } - - private static BuildEventsHandler.Factory buildEventsHandlerFactory( - DDTestSessionImpl.SessionImplFactory sessionFactory) { - return new BuildEventsHandler.Factory() { - @Override - public BuildEventsHandler create() { - return new BuildEventsHandlerImpl<>(sessionFactory, new JvmInfoFactory()); - } - }; - } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemModule.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemModule.java new file mode 100644 index 00000000000..fc428da6ffd --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemModule.java @@ -0,0 +1,14 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.civisibility.DDTestModule; +import datadog.trace.api.civisibility.events.BuildEventsHandler; +import datadog.trace.civisibility.ipc.ModuleExecutionResult; + +/** Test module abstraction that is used by build system instrumentations (e.g. 
Maven, Gradle) */ +public interface DDBuildSystemModule extends DDTestModule { + long getId(); + + BuildEventsHandler.ModuleInfo getModuleInfo(); + + void onModuleExecutionResultReceived(ModuleExecutionResult result); +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemModuleImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemModuleImpl.java new file mode 100644 index 00000000000..e1c2a0714a5 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemModuleImpl.java @@ -0,0 +1,215 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.Config; +import datadog.trace.api.DDTags; +import datadog.trace.api.civisibility.events.BuildEventsHandler; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; +import datadog.trace.civisibility.coverage.CoverageUtils; +import datadog.trace.civisibility.decorator.TestDecorator; +import datadog.trace.civisibility.ipc.ModuleExecutionResult; +import datadog.trace.civisibility.ipc.TestFramework; +import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.source.index.RepoIndexProvider; +import datadog.trace.civisibility.utils.SpanUtils; +import java.io.File; +import java.net.InetSocketAddress; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.Consumer; +import javax.annotation.Nullable; +import javax.annotation.concurrent.GuardedBy; +import org.jacoco.core.analysis.IBundleCoverage; +import org.jacoco.core.analysis.ICounter; +import org.jacoco.core.data.ExecutionDataStore; + +public class DDBuildSystemModuleImpl extends DDTestModuleImpl implements DDBuildSystemModule { + + private final String repoRoot; + private final InetSocketAddress signalServerAddress; + private final Collection outputClassesDirs; + private final RepoIndexProvider repoIndexProvider; + private final TestModuleRegistry testModuleRegistry; + private final LongAdder testsSkipped = new LongAdder(); + private volatile boolean codeCoverageEnabled; + private volatile boolean itrEnabled; + private final Object coverageDataLock = new Object(); + + @GuardedBy("coverageDataLock") + private ExecutionDataStore coverageData; + + public DDBuildSystemModuleImpl( + AgentSpan.Context sessionSpanContext, + long sessionId, + String moduleName, + String repoRoot, + String startCommand, + @Nullable Long startTime, + InetSocketAddress signalServerAddress, + Collection outputClassesDirs, + Config config, + TestDecorator testDecorator, + SourcePathResolver sourcePathResolver, + Codeowners codeowners, + MethodLinesResolver methodLinesResolver, + CoverageProbeStoreFactory coverageProbeStoreFactory, + RepoIndexProvider repoIndexProvider, + TestModuleRegistry testModuleRegistry, + Consumer onSpanFinish) { + super( + sessionSpanContext, + sessionId, + moduleName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory, + onSpanFinish); + this.repoRoot = repoRoot; + this.signalServerAddress = signalServerAddress; + this.outputClassesDirs = outputClassesDirs; + this.repoIndexProvider = repoIndexProvider; + 
this.testModuleRegistry = testModuleRegistry; + + setTag(Tags.TEST_COMMAND, startCommand); + } + + @Override + public long getId() { + return span.getSpanId(); + } + + @Override + public BuildEventsHandler.ModuleInfo getModuleInfo() { + long moduleId = span.getSpanId(); + String signalServerHost = + signalServerAddress != null ? signalServerAddress.getHostName() : null; + int signalServerPort = signalServerAddress != null ? signalServerAddress.getPort() : 0; + return new BuildEventsHandler.ModuleInfo( + moduleId, sessionId, signalServerHost, signalServerPort); + } + + /** + * Handles module execution results received from a forked JVM. + * + *

Depending on the build configuration it is possible to have multiple forks created for the + * same module: in Gradle this is achieved with {@code maxParallelForks} or {@code forkEvery} + * properties of the Test task, in Maven - with {@code forkCount>} property of the Surefire + * plugin. The forks can execute either concurrently or sequentially. + * + *

Taking this into account, the method should merge execution results rather than overwrite + * them. + * + *

This method is called by the {@link + * datadog.trace.util.AgentThreadFactory.AgentThread#CI_SIGNAL_SERVER} thread. + * + * @param result Module execution results received from a forked JVM. + */ + @Override + public void onModuleExecutionResultReceived(ModuleExecutionResult result) { + if (result.isCoverageEnabled()) { + codeCoverageEnabled = true; + } + if (result.isItrEnabled()) { + itrEnabled = true; + } + + testsSkipped.add(result.getTestsSkippedTotal()); + + // it is important that modules parse their own instances of ExecutionDataStore + // and not share them with the session: + // ExecutionData instances that reside inside the store are mutable, + // and modifying an ExecutionData in one module is going + // to be visible in another module + // (see internal implementation of org.jacoco.core.data.ExecutionDataStore.accept) + ExecutionDataStore coverageData = CoverageUtils.parse(result.getCoverageData()); + if (coverageData != null) { + synchronized (coverageDataLock) { + if (this.coverageData == null) { + this.coverageData = coverageData; + } else { + // merge module coverage data from multiple VMs + coverageData.accept(this.coverageData); + } + } + } + + for (TestFramework testFramework : result.getTestFrameworks()) { + SpanUtils.mergeTag(span, Tags.TEST_FRAMEWORK, testFramework.getName()); + SpanUtils.mergeTag(span, Tags.TEST_FRAMEWORK_VERSION, testFramework.getVersion()); + } + } + + @Override + public void end(@Nullable Long endTime) { + if (codeCoverageEnabled) { + setTag(Tags.TEST_CODE_COVERAGE_ENABLED, true); + } + + if (itrEnabled) { + setTag(Tags.TEST_ITR_TESTS_SKIPPING_ENABLED, true); + setTag(Tags.TEST_ITR_TESTS_SKIPPING_TYPE, "test"); + + long testsSkippedTotal = testsSkipped.sum(); + setTag(Tags.TEST_ITR_TESTS_SKIPPING_COUNT, testsSkippedTotal); + if (testsSkippedTotal > 0) { + setTag(DDTags.CI_ITR_TESTS_SKIPPED, true); + } + } + + synchronized (coverageDataLock) { + if (coverageData != null && !coverageData.getContents().isEmpty()) { + processCoverageData(coverageData); + } + } + + testModuleRegistry.removeModule(this); + + super.end(endTime); + } + + private void processCoverageData(ExecutionDataStore coverageData) { + if (coverageData == null) { + return; + } + IBundleCoverage coverageBundle = + CoverageUtils.createCoverageBundle(coverageData, outputClassesDirs); + if (coverageBundle == null) { + return; + } + + long coveragePercentage = getCoveragePercentage(coverageBundle); + setTag(Tags.TEST_CODE_COVERAGE_LINES_PERCENTAGE, coveragePercentage); + + File coverageReportFolder = getCoverageReportFolder(); + if (coverageReportFolder != null) { + CoverageUtils.dumpCoverageReport( + coverageBundle, repoIndexProvider.getIndex(), repoRoot, coverageReportFolder); + } + } + + private static long getCoveragePercentage(IBundleCoverage coverageBundle) { + ICounter instructionCounter = coverageBundle.getInstructionCounter(); + int totalInstructionsCount = instructionCounter.getTotalCount(); + int coveredInstructionsCount = instructionCounter.getCoveredCount(); + return Math.round((100d * coveredInstructionsCount) / totalInstructionsCount); + } + + private File getCoverageReportFolder() { + String coverageReportDumpDir = config.getCiVisibilityCodeCoverageReportDumpDir(); + if (coverageReportDumpDir != null) { + return Paths.get(coverageReportDumpDir, "session-" + sessionId, moduleName) + .toAbsolutePath() + .toFile(); + } else { + return null; + } + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemSession.java 
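The fork-level coverage merging implemented above (and repeated at session level in DDBuildSystemSessionImpl below) is easier to follow in isolation. A minimal sketch, assuming JaCoCo 0.8.x on the classpath; CoverageMerger and the class id/probe values are illustrative, not part of the agent:

import org.jacoco.core.data.ExecutionData;
import org.jacoco.core.data.ExecutionDataStore;

final class CoverageMerger {
  // ExecutionDataStore.accept(visitor) replays every ExecutionData it holds into the visitor;
  // a store is itself a visitor, so this merges "source" into "target", OR-ing the probe
  // arrays of classes present in both stores.
  static void mergeInto(ExecutionDataStore target, ExecutionDataStore source) {
    synchronized (target) { // forks may report concurrently
      source.accept(target);
    }
  }

  public static void main(String[] args) {
    // the same class (same id, name and probe count) observed by two forks, different probes hit
    ExecutionDataStore fork1 = new ExecutionDataStore();
    fork1.put(new ExecutionData(42L, "com/example/Foo", new boolean[] {true, false}));
    ExecutionDataStore fork2 = new ExecutionDataStore();
    fork2.put(new ExecutionData(42L, "com/example/Foo", new boolean[] {false, true}));

    ExecutionDataStore module = new ExecutionDataStore();
    mergeInto(module, fork1);
    mergeInto(module, fork2);
    // module now reports probes {true, true} for com/example/Foo; note the first merge stored
    // fork1's ExecutionData instance, which is why the code above parses a fresh store per
    // module instead of sharing instances between module and session stores.
    System.out.println(module.getContents().size()); // prints 1
  }
}

The merged store is then handed to CoverageUtils.createCoverageBundle(...) and the instruction counters shown above to derive the coverage percentage tag.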
b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemSession.java new file mode 100644 index 00000000000..09727d30536 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemSession.java @@ -0,0 +1,27 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.civisibility.DDTestSession; +import datadog.trace.api.civisibility.config.ModuleExecutionSettings; +import datadog.trace.civisibility.config.JvmInfo; +import java.io.File; +import java.nio.file.Path; +import java.util.Collection; +import javax.annotation.Nullable; + +/** Test session abstraction that is used by build system instrumentations (e.g. Maven, Gradle) */ +public interface DDBuildSystemSession extends DDTestSession { + + DDBuildSystemModule testModuleStart( + String moduleName, @Nullable Long startTime, Collection outputClassesDirs); + + ModuleExecutionSettings getModuleExecutionSettings(JvmInfo jvmInfo); + + interface Factory { + DDBuildSystemSession startSession( + String projectName, + Path projectRoot, + String startCommand, + String buildSystemName, + Long startTime); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionParent.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemSessionImpl.java similarity index 55% rename from dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionParent.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemSessionImpl.java index c91d558dee1..200b7a6c570 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionParent.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDBuildSystemSessionImpl.java @@ -1,21 +1,15 @@ package datadog.trace.civisibility; -import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; - import datadog.trace.api.Config; import datadog.trace.api.DDTags; -import datadog.trace.api.civisibility.CIConstants; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.api.civisibility.config.ModuleExecutionSettings; import datadog.trace.api.civisibility.config.SkippableTest; -import datadog.trace.api.civisibility.source.SourcePathResolver; -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.config.JvmInfo; import datadog.trace.civisibility.config.ModuleExecutionSettingsFactory; -import datadog.trace.civisibility.context.SpanTestContext; -import datadog.trace.civisibility.context.TestContext; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; +import datadog.trace.civisibility.coverage.CoverageUtils; import datadog.trace.civisibility.decorator.TestDecorator; import datadog.trace.civisibility.ipc.ErrorResponse; import datadog.trace.civisibility.ipc.ModuleExecutionResult; @@ -27,31 +21,44 @@ import datadog.trace.civisibility.ipc.SkippableTestsRequest; import datadog.trace.civisibility.ipc.SkippableTestsResponse; import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import datadog.trace.civisibility.source.index.RepoIndex; import 
datadog.trace.civisibility.source.index.RepoIndexBuilder; +import datadog.trace.civisibility.utils.SpanUtils; +import java.io.File; +import java.nio.file.Paths; import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; import java.util.concurrent.atomic.LongAdder; import javax.annotation.Nullable; - -public class DDTestSessionParent extends DDTestSessionImpl { - - private final AgentSpan span; - private final TestContext context; +import javax.annotation.concurrent.GuardedBy; +import org.jacoco.core.analysis.IBundleCoverage; +import org.jacoco.core.analysis.ICounter; +import org.jacoco.core.data.ExecutionDataStore; + +public class DDBuildSystemSessionImpl extends DDTestSessionImpl implements DDBuildSystemSession { + private final String repoRoot; + private final String startCommand; private final TestModuleRegistry testModuleRegistry; - private final Config config; - private final TestDecorator testDecorator; - private final SourcePathResolver sourcePathResolver; - private final Codeowners codeowners; - private final MethodLinesResolver methodLinesResolver; private final ModuleExecutionSettingsFactory moduleExecutionSettingsFactory; private final SignalServer signalServer; private final RepoIndexBuilder repoIndexBuilder; protected final LongAdder testsSkipped = new LongAdder(); private volatile boolean codeCoverageEnabled; private volatile boolean itrEnabled; + private final Object coverageDataLock = new Object(); + + @GuardedBy("coverageDataLock") + private final ExecutionDataStore coverageData = new ExecutionDataStore(); - public DDTestSessionParent( + @GuardedBy("coverageDataLock") + private final Collection outputClassesDirs = new HashSet<>(); + + public DDBuildSystemSessionImpl( String projectName, + String repoRoot, + String startCommand, @Nullable Long startTime, Config config, TestModuleRegistry testModuleRegistry, @@ -60,34 +67,25 @@ public DDTestSessionParent( Codeowners codeowners, MethodLinesResolver methodLinesResolver, ModuleExecutionSettingsFactory moduleExecutionSettingsFactory, + CoverageProbeStoreFactory coverageProbeStoreFactory, SignalServer signalServer, RepoIndexBuilder repoIndexBuilder) { - this.config = config; + super( + projectName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory); + this.repoRoot = repoRoot; + this.startCommand = startCommand; this.testModuleRegistry = testModuleRegistry; - this.testDecorator = testDecorator; - this.sourcePathResolver = sourcePathResolver; - this.codeowners = codeowners; - this.methodLinesResolver = methodLinesResolver; this.moduleExecutionSettingsFactory = moduleExecutionSettingsFactory; this.signalServer = signalServer; this.repoIndexBuilder = repoIndexBuilder; - if (startTime != null) { - span = startSpan(testDecorator.component() + ".test_session", startTime); - } else { - span = startSpan(testDecorator.component() + ".test_session"); - } - - context = new SpanTestContext(span, null); - - span.setSpanType(InternalSpanTypes.TEST_SESSION_END); - span.setTag(Tags.SPAN_KIND, Tags.SPAN_KIND_TEST_SESSION); - span.setTag(Tags.TEST_SESSION_ID, context.getId()); - - span.setResourceName(projectName); - - testDecorator.afterStart(span); - signalServer.registerSignalHandler( SignalType.MODULE_EXECUTION_RESULT, this::onModuleExecutionResultReceived); signalServer.registerSignalHandler( @@ -95,6 +93,8 @@ public DDTestSessionParent( signalServer.registerSignalHandler( SignalType.SKIPPABLE_TESTS_REQUEST, 
this::onSkippableTestsRequestReceived); signalServer.start(); + + setTag(Tags.TEST_COMMAND, startCommand); } private SignalResponse onModuleExecutionResultReceived(ModuleExecutionResult result) { @@ -104,16 +104,17 @@ private SignalResponse onModuleExecutionResultReceived(ModuleExecutionResult res if (result.isItrEnabled()) { itrEnabled = true; } - String testFramework = result.getTestFramework(); - if (testFramework != null) { - setTag(Tags.TEST_FRAMEWORK, testFramework); - } - String testFrameworkVersion = result.getTestFrameworkVersion(); - if (testFrameworkVersion != null) { - setTag(Tags.TEST_FRAMEWORK_VERSION, testFrameworkVersion); - } testsSkipped.add(result.getTestsSkippedTotal()); + + ExecutionDataStore moduleCoverageData = CoverageUtils.parse(result.getCoverageData()); + if (moduleCoverageData != null) { + synchronized (coverageDataLock) { + // add module coverage data to session coverage data + moduleCoverageData.accept(coverageData); + } + } + return testModuleRegistry.onModuleExecutionResultReceived(result); } @@ -140,37 +141,14 @@ private SignalResponse onSkippableTestsRequestReceived( } } - @Override - public void setTag(String key, Object value) { - span.setTag(key, value); - } - - @Override - public void setErrorInfo(Throwable error) { - span.setError(true); - span.addThrowable(error); - span.setTag(Tags.TEST_STATUS, CIConstants.TEST_FAIL); - } - - @Override - public void setSkipReason(String skipReason) { - span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); - if (skipReason != null) { - span.setTag(Tags.TEST_SKIP_REASON, skipReason); - } - } - @Override public void end(@Nullable Long endTime) { signalServer.stop(); - String status = context.getStatus(); - span.setTag(Tags.TEST_STATUS, status != null ? status : CIConstants.TEST_SKIP); - testDecorator.beforeFinish(span); - if (codeCoverageEnabled) { setTag(Tags.TEST_CODE_COVERAGE_ENABLED, true); } + if (itrEnabled) { setTag(Tags.TEST_ITR_TESTS_SKIPPING_ENABLED, true); setTag(Tags.TEST_ITR_TESTS_SKIPPING_TYPE, "test"); @@ -182,28 +160,81 @@ public void end(@Nullable Long endTime) { } } - if (endTime != null) { - span.finish(endTime); + synchronized (coverageDataLock) { + if (!coverageData.getContents().isEmpty()) { + processCoverageData(coverageData); + } + } + + super.end(endTime); + } + + private void processCoverageData(ExecutionDataStore coverageData) { + IBundleCoverage coverageBundle = + CoverageUtils.createCoverageBundle(coverageData, outputClassesDirs); + if (coverageBundle == null) { + return; + } + + long coveragePercentage = getCoveragePercentage(coverageBundle); + setTag(Tags.TEST_CODE_COVERAGE_LINES_PERCENTAGE, coveragePercentage); + + File coverageReportFolder = getCoverageReportFolder(); + if (coverageReportFolder != null) { + CoverageUtils.dumpCoverageReport( + coverageBundle, repoIndexBuilder.getIndex(), repoRoot, coverageReportFolder); + } + } + + private static long getCoveragePercentage(IBundleCoverage coverageBundle) { + ICounter instructionCounter = coverageBundle.getInstructionCounter(); + int totalInstructionsCount = instructionCounter.getTotalCount(); + int coveredInstructionsCount = instructionCounter.getCoveredCount(); + return Math.round((100d * coveredInstructionsCount) / totalInstructionsCount); + } + + private File getCoverageReportFolder() { + String coverageReportDumpDir = config.getCiVisibilityCodeCoverageReportDumpDir(); + if (coverageReportDumpDir != null) { + return Paths.get(coverageReportDumpDir, "session-" + span.getSpanId(), "aggregated") + .toAbsolutePath() + .toFile(); } else { 
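// Illustration (not part of this patch): the percentage arithmetic used by
// getCoveragePercentage() above, with plain ints in place of JaCoCo's ICounter.
// The 100d factor forces floating-point division, and Math.round() yields a whole percent.
class CoveragePercentageSketch {
  static long percentage(int covered, int total) {
    return Math.round((100d * covered) / total); // same formula as getCoveragePercentage() above
  }

  public static void main(String[] args) {
    System.out.println(percentage(337, 450)); // prints 75 (74.888... rounds up)
    System.out.println(percentage(1, 3));     // prints 33 (33.333... rounds down)
  }
}
// Note: the session code above only reaches this math when the merged coverage data is
// non-empty, so the total == 0 case is not hit there.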
- span.finish(); + return null; } } @Override - public DDTestModuleImpl testModuleStart(String moduleName, @Nullable Long startTime) { - DDTestModuleParent module = - new DDTestModuleParent( - context, + public DDBuildSystemModuleImpl testModuleStart(String moduleName, @Nullable Long startTime) { + return testModuleStart(moduleName, startTime, Collections.emptySet()); + } + + @Override + public DDBuildSystemModuleImpl testModuleStart( + String moduleName, @Nullable Long startTime, Collection outputClassesDirs) { + synchronized (coverageDataLock) { + this.outputClassesDirs.addAll(outputClassesDirs); + } + + DDBuildSystemModuleImpl module = + new DDBuildSystemModuleImpl( + span.context(), + span.getSpanId(), moduleName, + repoRoot, + startCommand, startTime, + signalServer.getAddress(), + outputClassesDirs, config, - testModuleRegistry, testDecorator, sourcePathResolver, codeowners, methodLinesResolver, - moduleExecutionSettingsFactory, - signalServer.getAddress()); + coverageProbeStoreFactory, + repoIndexBuilder, + testModuleRegistry, + SpanUtils.propagateCiVisibilityTagsTo(span)); testModuleRegistry.addModule(module); return module; } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModule.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModule.java new file mode 100644 index 00000000000..6e2cf4687a3 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModule.java @@ -0,0 +1,32 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.civisibility.config.SkippableTest; +import javax.annotation.Nullable; + +/** Test module abstraction that is used by test framework instrumentations (e.g. JUnit, TestNG) */ +public interface DDTestFrameworkModule { + DDTestSuiteImpl testSuiteStart( + String testSuiteName, + @Nullable Class testClass, + @Nullable Long startTime, + boolean parallelized); + + /** + * Checks if a given test can be skipped with Intelligent Test Runner or not + * + * @param test Test to be checked + * @return {@code true} if the test can be skipped, {@code false} otherwise + */ + boolean isSkippable(SkippableTest test); + + /** + * Checks if a given test can be skipped with Intelligent Test Runner or not. If the test is + * considered skippable, the count of skippable tests is incremented. 
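Both concrete implementations later in this patch (DDTestFrameworkModuleImpl and DDTestFrameworkModuleProxy) realize this pair the same way: skip() delegates to isSkippable() and bumps a LongAdder whose sum is reported when the module ends. A minimal, self-contained sketch of that contract follows; it uses String in place of SkippableTest and otherwise hypothetical names, so it is an illustration rather than code from the patch.

import java.util.Set;
import java.util.concurrent.atomic.LongAdder;

final class SkipContractSketch {
  private final Set<String> skippable;
  private final LongAdder testsSkipped = new LongAdder();

  SkipContractSketch(Set<String> skippable) {
    this.skippable = skippable;
  }

  boolean isSkippable(String test) {
    return test != null && skippable.contains(test);
  }

  boolean skip(String test) {
    if (isSkippable(test)) {
      testsSkipped.increment(); // accumulated total becomes the ITR "tests skipped" count
      return true;
    }
    return false;
  }

  long skippedSoFar() {
    return testsSkipped.sum();
  }
}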
+ * + * @param test Test to be checked + * @return {@code true} if the test can be skipped, {@code false} otherwise + */ + boolean skip(SkippableTest test); + + void end(Long startTime); +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModuleImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModuleImpl.java new file mode 100644 index 00000000000..bd827180c72 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModuleImpl.java @@ -0,0 +1,107 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.Config; +import datadog.trace.api.DDTags; +import datadog.trace.api.civisibility.config.ModuleExecutionSettings; +import datadog.trace.api.civisibility.config.SkippableTest; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.config.JvmInfo; +import datadog.trace.civisibility.config.ModuleExecutionSettingsFactory; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; +import datadog.trace.civisibility.decorator.TestDecorator; +import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.Consumer; +import javax.annotation.Nullable; + +/** + * Test module implementation that is used by test framework instrumentations only in cases when the + * build system is NOT instrumented: This class manages the module span since there is no build + * system instrumentation to do it + */ +public class DDTestFrameworkModuleImpl extends DDTestModuleImpl implements DDTestFrameworkModule { + + private final LongAdder testsSkipped = new LongAdder(); + private final Collection skippableTests; + private final boolean codeCoverageEnabled; + private final boolean itrEnabled; + + public DDTestFrameworkModuleImpl( + AgentSpan.Context sessionSpanContext, + long sessionId, + String moduleName, + @Nullable Long startTime, + Config config, + TestDecorator testDecorator, + SourcePathResolver sourcePathResolver, + Codeowners codeowners, + MethodLinesResolver methodLinesResolver, + CoverageProbeStoreFactory coverageProbeStoreFactory, + ModuleExecutionSettingsFactory moduleExecutionSettingsFactory, + Consumer onSpanFinish) { + super( + sessionSpanContext, + sessionId, + moduleName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory, + onSpanFinish); + + ModuleExecutionSettings moduleExecutionSettings = + moduleExecutionSettingsFactory.create(JvmInfo.CURRENT_JVM, moduleName); + codeCoverageEnabled = moduleExecutionSettings.isCodeCoverageEnabled(); + itrEnabled = moduleExecutionSettings.isItrEnabled(); + Collection moduleSkippableTests = + moduleExecutionSettings.getSkippableTests(moduleName); + skippableTests = + moduleSkippableTests.size() > 100 + ? 
new HashSet<>(moduleSkippableTests) + : new ArrayList<>(moduleSkippableTests); + } + + @Override + public boolean isSkippable(SkippableTest test) { + return test != null && skippableTests.contains(test); + } + + @Override + public boolean skip(SkippableTest test) { + if (isSkippable(test)) { + testsSkipped.increment(); + return true; + } else { + return false; + } + } + + @Override + public void end(@Nullable Long endTime) { + if (codeCoverageEnabled) { + setTag(Tags.TEST_CODE_COVERAGE_ENABLED, true); + } + + if (itrEnabled) { + setTag(Tags.TEST_ITR_TESTS_SKIPPING_ENABLED, true); + setTag(Tags.TEST_ITR_TESTS_SKIPPING_TYPE, "test"); + + long testsSkippedTotal = testsSkipped.sum(); + setTag(Tags.TEST_ITR_TESTS_SKIPPING_COUNT, testsSkippedTotal); + if (testsSkippedTotal > 0) { + setTag(DDTags.CI_ITR_TESTS_SKIPPED, true); + } + } + + super.end(endTime); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModuleProxy.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModuleProxy.java new file mode 100644 index 00000000000..f6cf2224685 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkModuleProxy.java @@ -0,0 +1,175 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.Config; +import datadog.trace.api.civisibility.config.SkippableTest; +import datadog.trace.api.civisibility.coverage.CoverageDataSupplier; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.config.JvmInfo; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; +import datadog.trace.civisibility.decorator.TestDecorator; +import datadog.trace.civisibility.ipc.ModuleExecutionResult; +import datadog.trace.civisibility.ipc.SignalClient; +import datadog.trace.civisibility.ipc.SkippableTestsRequest; +import datadog.trace.civisibility.ipc.SkippableTestsResponse; +import datadog.trace.civisibility.ipc.TestFramework; +import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.LongAdder; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Test module implementation that is used by test framework instrumentations in those cases when + * the build system IS instrumented: since build system instrumentation manages module spans, this + * class does not do it. 
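A note on the data-structure choice above (the same pattern appears again in DDTestFrameworkModuleProxy below): the skippable-test list is wrapped in a HashSet only when it exceeds 100 entries and is otherwise kept in an ArrayList, presumably trading hashing overhead for cheap linear scans on small lists while keeping contains() effectively constant-time on large ones. A generic sketch of the same choice, with hypothetical names:

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;

final class LookupCollections {
  // Mirrors the threshold used above: linear scan for small lists, hashing beyond 100 entries.
  static <T> Collection<T> forMembershipChecks(List<T> items) {
    return items.size() > 100 ? new HashSet<>(items) : new ArrayList<>(items);
  }
}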
Instead, it accumulates module execution data and forwards it to the parent + * process (build system) using the signal server + */ +public class DDTestFrameworkModuleProxy implements DDTestFrameworkModule { + private static final Logger log = LoggerFactory.getLogger(DDTestFrameworkModuleProxy.class); + + private final long parentProcessSessionId; + private final long parentProcessModuleId; + private final String moduleName; + private final InetSocketAddress signalServerAddress; + private final CoverageDataSupplier coverageDataSupplier; + private final Config config; + private final TestDecorator testDecorator; + private final SourcePathResolver sourcePathResolver; + private final Codeowners codeowners; + private final MethodLinesResolver methodLinesResolver; + private final CoverageProbeStoreFactory coverageProbeStoreFactory; + private final LongAdder testsSkipped = new LongAdder(); + private final Collection skippableTests; + private final Collection testFrameworks = ConcurrentHashMap.newKeySet(); + + public DDTestFrameworkModuleProxy( + long parentProcessSessionId, + long parentProcessModuleId, + String moduleName, + Config config, + TestDecorator testDecorator, + SourcePathResolver sourcePathResolver, + Codeowners codeowners, + MethodLinesResolver methodLinesResolver, + CoverageProbeStoreFactory coverageProbeStoreFactory, + CoverageDataSupplier coverageDataSupplier, + @Nullable InetSocketAddress signalServerAddress) { + this.parentProcessSessionId = parentProcessSessionId; + this.parentProcessModuleId = parentProcessModuleId; + this.moduleName = moduleName; + this.signalServerAddress = signalServerAddress; + this.coverageDataSupplier = coverageDataSupplier; + this.config = config; + this.testDecorator = testDecorator; + this.sourcePathResolver = sourcePathResolver; + this.codeowners = codeowners; + this.methodLinesResolver = methodLinesResolver; + this.coverageProbeStoreFactory = coverageProbeStoreFactory; + this.skippableTests = fetchSkippableTests(moduleName, signalServerAddress); + } + + private Collection fetchSkippableTests( + String moduleName, InetSocketAddress signalServerAddress) { + if (!config.isCiVisibilityItrEnabled()) { + return Collections.emptyList(); + } + + SkippableTestsRequest request = new SkippableTestsRequest(moduleName, JvmInfo.CURRENT_JVM); + try (SignalClient signalClient = new SignalClient(signalServerAddress)) { + SkippableTestsResponse response = (SkippableTestsResponse) signalClient.send(request); + Collection moduleSkippableTests = response.getTests(); + log.debug("Received {} skippable tests", moduleSkippableTests.size()); + return moduleSkippableTests.size() > 100 + ? 
new HashSet<>(moduleSkippableTests) + : new ArrayList<>(moduleSkippableTests); + } catch (Exception e) { + log.error("Error while requesting skippable tests", e); + return Collections.emptySet(); + } + } + + @Override + public boolean isSkippable(SkippableTest test) { + return test != null && skippableTests.contains(test); + } + + @Override + public boolean skip(SkippableTest test) { + if (isSkippable(test)) { + testsSkipped.increment(); + return true; + } else { + return false; + } + } + + @Override + public void end(@Nullable Long endTime) { + // we have no span locally, + // send execution result to parent process that manages the span + sendModuleExecutionResult(); + } + + private void sendModuleExecutionResult() { + boolean coverageEnabled = config.isCiVisibilityCodeCoverageEnabled(); + boolean itrEnabled = config.isCiVisibilityItrEnabled(); + long testsSkippedTotal = testsSkipped.sum(); + byte[] coverageData = coverageDataSupplier.get(); + + ModuleExecutionResult moduleExecutionResult = + new ModuleExecutionResult( + parentProcessSessionId, + parentProcessModuleId, + coverageEnabled, + itrEnabled, + testsSkippedTotal, + testFrameworks, + coverageData); + + try (SignalClient signalClient = new SignalClient(signalServerAddress)) { + signalClient.send(moduleExecutionResult); + } catch (Exception e) { + log.error("Error while reporting module execution result", e); + } + } + + @Override + public DDTestSuiteImpl testSuiteStart( + String testSuiteName, + @Nullable Class testClass, + @Nullable Long startTime, + boolean parallelized) { + return new DDTestSuiteImpl( + null, + parentProcessSessionId, + parentProcessModuleId, + moduleName, + testSuiteName, + testClass, + startTime, + parallelized, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory, + this::propagateTestFrameworkData); + } + + private void propagateTestFrameworkData(AgentSpan childSpan) { + testFrameworks.add( + new TestFramework( + (String) childSpan.getTag(Tags.TEST_FRAMEWORK), + (String) childSpan.getTag(Tags.TEST_FRAMEWORK_VERSION))); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSession.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSession.java new file mode 100644 index 00000000000..d41bce5d2dd --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSession.java @@ -0,0 +1,16 @@ +package datadog.trace.civisibility; + +import java.nio.file.Path; +import javax.annotation.Nullable; + +/** Test session abstraction that is used by test framework instrumentations (e.g. 
JUnit, TestNG) */ +public interface DDTestFrameworkSession { + void end(Long startTime); + + DDTestFrameworkModule testModuleStart(String moduleName, @Nullable Long startTime); + + interface Factory { + DDTestFrameworkSession startSession( + String projectName, Path projectRoot, String component, Long startTime); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSessionImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSessionImpl.java new file mode 100644 index 00000000000..237c6ea2fb3 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSessionImpl.java @@ -0,0 +1,61 @@ +package datadog.trace.civisibility; + +import datadog.trace.api.Config; +import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.config.ModuleExecutionSettingsFactory; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; +import datadog.trace.civisibility.decorator.TestDecorator; +import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.utils.SpanUtils; +import javax.annotation.Nullable; + +/** + * Test session implementation that is used by test framework instrumentations only in cases when + * the build system is NOT instrumented. This class manages the session span since there is no build + * system instrumentation to do it + */ +public class DDTestFrameworkSessionImpl extends DDTestSessionImpl + implements DDTestFrameworkSession { + + private final ModuleExecutionSettingsFactory moduleExecutionSettingsFactory; + + public DDTestFrameworkSessionImpl( + String projectName, + @Nullable Long startTime, + Config config, + TestDecorator testDecorator, + SourcePathResolver sourcePathResolver, + Codeowners codeowners, + MethodLinesResolver methodLinesResolver, + CoverageProbeStoreFactory coverageProbeStoreFactory, + ModuleExecutionSettingsFactory moduleExecutionSettingsFactory) { + super( + projectName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory); + this.moduleExecutionSettingsFactory = moduleExecutionSettingsFactory; + } + + @Override + public DDTestFrameworkModuleImpl testModuleStart(String moduleName, @Nullable Long startTime) { + return new DDTestFrameworkModuleImpl( + span.context(), + span.getSpanId(), + moduleName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory, + moduleExecutionSettingsFactory, + SpanUtils.propagateCiVisibilityTagsTo(span)); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionChild.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSessionProxy.java similarity index 53% rename from dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionChild.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSessionProxy.java index 409c658d409..efbfa55f377 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionChild.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestFrameworkSessionProxy.java @@ -1,34 +1,43 @@ package datadog.trace.civisibility; import 
datadog.trace.api.Config; -import datadog.trace.api.civisibility.config.JvmInfo; -import datadog.trace.api.civisibility.config.ModuleExecutionSettings; -import datadog.trace.api.civisibility.source.SourcePathResolver; +import datadog.trace.api.civisibility.coverage.CoverageDataSupplier; import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; import datadog.trace.civisibility.decorator.TestDecorator; import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import java.net.InetSocketAddress; import javax.annotation.Nullable; -public class DDTestSessionChild extends DDTestSessionImpl { +/** + * Test session implementation that is used by test framework instrumentations in those cases when + * the build system IS instrumented: since build system instrumentation manages the session span, + * this class does not do it + */ +public class DDTestFrameworkSessionProxy implements DDTestFrameworkSession { - private final Long parentProcessSessionId; - private final Long parentProcessModuleId; + private final long parentProcessSessionId; + private final long parentProcessModuleId; private final Config config; private final TestDecorator testDecorator; private final SourcePathResolver sourcePathResolver; private final Codeowners codeowners; private final MethodLinesResolver methodLinesResolver; + private final CoverageProbeStoreFactory coverageProbeStoreFactory; + private final CoverageDataSupplier coverageDataSupplier; @Nullable private final InetSocketAddress signalServerAddress; - public DDTestSessionChild( - Long parentProcessSessionId, - Long parentProcessModuleId, + public DDTestFrameworkSessionProxy( + long parentProcessSessionId, + long parentProcessModuleId, Config config, TestDecorator testDecorator, SourcePathResolver sourcePathResolver, Codeowners codeowners, MethodLinesResolver methodLinesResolver, + CoverageProbeStoreFactory coverageProbeStoreFactory, + CoverageDataSupplier coverageDataSupplier, @Nullable InetSocketAddress signalServerAddress) { this.parentProcessSessionId = parentProcessSessionId; this.parentProcessModuleId = parentProcessModuleId; @@ -37,32 +46,19 @@ public DDTestSessionChild( this.sourcePathResolver = sourcePathResolver; this.codeowners = codeowners; this.methodLinesResolver = methodLinesResolver; + this.coverageProbeStoreFactory = coverageProbeStoreFactory; + this.coverageDataSupplier = coverageDataSupplier; this.signalServerAddress = signalServerAddress; } @Override - public void setTag(String key, Object value) { - throw new UnsupportedOperationException("Setting tags is not supported: " + key + ", " + value); - } - - @Override - public void setErrorInfo(Throwable error) { - throw new UnsupportedOperationException("Setting error info is not supported: " + error); - } - - @Override - public void setSkipReason(String skipReason) { - throw new UnsupportedOperationException("Setting skip reason is not supported: " + skipReason); - } - - @Override - public void end(@Nullable Long endTime) { + public void end(Long startTime) { // no op } @Override - public DDTestModuleImpl testModuleStart(String moduleName, @Nullable Long startTime) { - return new DDTestModuleChild( + public DDTestFrameworkModule testModuleStart(String moduleName, @Nullable Long startTime) { + return new DDTestFrameworkModuleProxy( parentProcessSessionId, parentProcessModuleId, moduleName, @@ -71,12 +67,8 @@ public DDTestModuleImpl testModuleStart(String moduleName, 
@Nullable Long startT sourcePathResolver, codeowners, methodLinesResolver, + coverageProbeStoreFactory, + coverageDataSupplier, signalServerAddress); } - - @Override - public ModuleExecutionSettings getModuleExecutionSettings(JvmInfo jvmInfo) { - throw new UnsupportedOperationException( - "Getting module execution settings is not supported: " + jvmInfo); - } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestImpl.java index d3e85423008..567e40f23c4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestImpl.java @@ -8,7 +8,6 @@ import datadog.trace.api.civisibility.DDTest; import datadog.trace.api.civisibility.InstrumentationBridge; import datadog.trace.api.civisibility.coverage.CoverageProbeStore; -import datadog.trace.api.civisibility.source.SourcePathResolver; import datadog.trace.api.gateway.RequestContextSlot; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -16,13 +15,14 @@ import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.civisibility.codeowners.Codeowners; -import datadog.trace.civisibility.context.TestContext; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; import datadog.trace.civisibility.decorator.TestDecorator; import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import java.lang.reflect.Method; import java.util.Collection; +import java.util.function.Consumer; import javax.annotation.Nullable; -import org.objectweb.asm.Type; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,27 +31,30 @@ public class DDTestImpl implements DDTest { private static final Logger log = LoggerFactory.getLogger(DDTestImpl.class); private final AgentSpan span; - private final TestContext suiteContext; - private final TestContext moduleContext; - private final TestDecorator testDecorator; + private final long sessionId; + private final long suiteId; + private final Consumer onSpanFinish; public DDTestImpl( - TestContext suiteContext, - TestContext moduleContext, + long sessionId, + long moduleId, + long suiteId, String moduleName, String testSuiteName, String testName, @Nullable Long startTime, @Nullable Class testClass, - @Nullable String testMethodName, @Nullable Method testMethod, Config config, TestDecorator testDecorator, SourcePathResolver sourcePathResolver, MethodLinesResolver methodLinesResolver, - Codeowners codeowners) { - this.suiteContext = suiteContext; - this.moduleContext = moduleContext; + Codeowners codeowners, + CoverageProbeStoreFactory coverageProbeStoreFactory, + Consumer onSpanFinish) { + this.sessionId = sessionId; + this.suiteId = suiteId; + this.onSpanFinish = onSpanFinish; AgentTracer.SpanBuilder spanBuilder = AgentTracer.get() @@ -60,7 +63,7 @@ public DDTestImpl( .asChildOf(null) .withRequestContextData( RequestContextSlot.CI_VISIBILITY, - InstrumentationBridge.createCoverageProbeStore(sourcePathResolver)); + coverageProbeStoreFactory.create(sourcePathResolver)); if (startTime != null) { spanBuilder = spanBuilder.withStartTimestamp(startTime); @@ -79,10 +82,6 @@ public DDTestImpl( span.setTag(Tags.TEST_SUITE, testSuiteName); 
span.setTag(Tags.TEST_MODULE, moduleName); - Long suiteId = suiteContext.getId(); - Long moduleId = moduleContext.getId(); - Long sessionId = moduleContext.getParentId(); - span.setTag(Tags.TEST_SUITE_ID, suiteId); span.setTag(Tags.TEST_MODULE_ID, moduleId); span.setTag(Tags.TEST_SESSION_ID, sessionId); @@ -92,17 +91,13 @@ public DDTestImpl( if (testClass != null && !testClass.getName().equals(testSuiteName)) { span.setTag(Tags.TEST_SOURCE_CLASS, testClass.getName()); } - if (testMethodName != null && testMethod != null) { - span.setTag(Tags.TEST_SOURCE_METHOD, testMethodName + Type.getMethodDescriptor(testMethod)); - } if (config.isCiVisibilitySourceDataEnabled()) { populateSourceDataTags( span, testClass, testMethod, sourcePathResolver, methodLinesResolver, codeowners); } - this.testDecorator = testDecorator; - this.testDecorator.afterStart(span); + testDecorator.afterStart(span); } private void populateSourceDataTags( @@ -184,14 +179,11 @@ public void end(@Nullable Long endTime) { } CoverageProbeStore probes = span.getRequestContext().getData(RequestContextSlot.CI_VISIBILITY); - probes.report(moduleContext.getParentId(), suiteContext.getId(), span.getSpanId()); + probes.report(sessionId, suiteId, span.getSpanId()); scope.close(); - String status = (String) span.getTag(Tags.TEST_STATUS); - suiteContext.reportChildStatus(status); - - testDecorator.beforeFinish(span); + onSpanFinish.accept(span); if (endTime != null) { span.finish(endTime); diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleChild.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleChild.java deleted file mode 100644 index f0c57d27db1..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleChild.java +++ /dev/null @@ -1,120 +0,0 @@ -package datadog.trace.civisibility; - -import datadog.trace.api.Config; -import datadog.trace.api.civisibility.config.JvmInfo; -import datadog.trace.api.civisibility.config.SkippableTest; -import datadog.trace.api.civisibility.source.SourcePathResolver; -import datadog.trace.bootstrap.instrumentation.api.Tags; -import datadog.trace.civisibility.codeowners.Codeowners; -import datadog.trace.civisibility.context.ParentProcessTestContext; -import datadog.trace.civisibility.decorator.TestDecorator; -import datadog.trace.civisibility.ipc.ModuleExecutionResult; -import datadog.trace.civisibility.ipc.SignalClient; -import datadog.trace.civisibility.ipc.SkippableTestsRequest; -import datadog.trace.civisibility.ipc.SkippableTestsResponse; -import datadog.trace.civisibility.source.MethodLinesResolver; -import java.net.InetSocketAddress; -import java.util.Collection; -import java.util.Collections; -import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Representation of a test module in a child process (JVM that is forked by build system to run the - * tests) - */ -public class DDTestModuleChild extends DDTestModuleImpl { - - private static final Logger log = LoggerFactory.getLogger(DDTestModuleChild.class); - - private final ParentProcessTestContext context; - - public DDTestModuleChild( - Long parentProcessSessionId, - Long parentProcessModuleId, - String moduleName, - Config config, - TestDecorator testDecorator, - SourcePathResolver sourcePathResolver, - Codeowners codeowners, - MethodLinesResolver methodLinesResolver, - @Nullable InetSocketAddress signalServerAddress) { - super( - moduleName, - config, - 
testDecorator, - sourcePathResolver, - codeowners, - methodLinesResolver, - signalServerAddress); - context = new ParentProcessTestContext(parentProcessSessionId, parentProcessModuleId); - } - - @Override - protected ParentProcessTestContext getContext() { - return context; - } - - @Override - public void setTag(String key, Object value) { - throw new UnsupportedOperationException("Setting tags is not supported: " + key + ", " + value); - } - - @Override - public void setErrorInfo(Throwable error) { - throw new UnsupportedOperationException("Setting error info is not supported: " + error); - } - - @Override - public void setSkipReason(String skipReason) { - throw new UnsupportedOperationException("Setting skip reason is not supported: " + skipReason); - } - - @Override - public void end(@Nullable Long endTime) { - // we have no span locally, - // send execution result to parent process that has the span - sendModuleExecutionResult(); - } - - private void sendModuleExecutionResult() { - long moduleId = context.getId(); - long sessionId = context.getParentId(); - boolean coverageEnabled = config.isCiVisibilityCodeCoverageEnabled(); - boolean itrEnabled = config.isCiVisibilityItrEnabled(); - long testsSkippedTotal = testsSkipped.sum(); - String testFramework = String.valueOf(context.getChildTag(Tags.TEST_FRAMEWORK)); - String testFrameworkVersion = String.valueOf(context.getChildTag(Tags.TEST_FRAMEWORK_VERSION)); - - ModuleExecutionResult moduleExecutionResult = - new ModuleExecutionResult( - sessionId, - moduleId, - coverageEnabled, - itrEnabled, - testsSkippedTotal, - testFramework, - testFrameworkVersion); - - try (SignalClient signalClient = new SignalClient(signalServerAddress)) { - signalClient.send(moduleExecutionResult); - } catch (Exception e) { - log.error("Error while reporting module execution result", e); - } - } - - @Override - protected Collection fetchSkippableTests() { - SkippableTestsRequest request = new SkippableTestsRequest(moduleName, JvmInfo.CURRENT_JVM); - try (SignalClient signalClient = new SignalClient(signalServerAddress)) { - SkippableTestsResponse response = (SkippableTestsResponse) signalClient.send(request); - Collection tests = response.getTests(); - log.debug("Received {} skippable tests", tests.size()); - return tests; - } catch (Exception e) { - log.error("Error while requesting skippable tests", e); - return Collections.emptySet(); - } - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleImpl.java index 5f61248efd7..371f0d8a211 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleImpl.java @@ -1,51 +1,109 @@ package datadog.trace.civisibility; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; + import datadog.trace.api.Config; +import datadog.trace.api.civisibility.CIConstants; import datadog.trace.api.civisibility.DDTestModule; -import datadog.trace.api.civisibility.config.SkippableTest; -import datadog.trace.api.civisibility.events.BuildEventsHandler; -import datadog.trace.api.civisibility.source.SourcePathResolver; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; +import datadog.trace.bootstrap.instrumentation.api.Tags; import 
datadog.trace.civisibility.codeowners.Codeowners; -import datadog.trace.civisibility.context.TestContext; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; import datadog.trace.civisibility.decorator.TestDecorator; import datadog.trace.civisibility.source.MethodLinesResolver; -import java.net.InetSocketAddress; -import java.util.Collection; -import java.util.concurrent.atomic.LongAdder; +import datadog.trace.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.utils.SpanUtils; +import java.util.function.Consumer; import javax.annotation.Nullable; -public abstract class DDTestModuleImpl implements DDTestModule { +public class DDTestModuleImpl implements DDTestModule { + protected final AgentSpan span; + protected final long sessionId; protected final String moduleName; protected final Config config; protected final TestDecorator testDecorator; protected final SourcePathResolver sourcePathResolver; protected final Codeowners codeowners; protected final MethodLinesResolver methodLinesResolver; - @Nullable protected final InetSocketAddress signalServerAddress; - - protected final LongAdder testsSkipped = new LongAdder(); - private volatile Collection skippableTests; - private final Object skippableTestsInitLock = new Object(); + protected final CoverageProbeStoreFactory coverageProbeStoreFactory; + private final Consumer onSpanFinish; - protected DDTestModuleImpl( + public DDTestModuleImpl( + AgentSpan.Context sessionSpanContext, + long sessionId, String moduleName, + @Nullable Long startTime, Config config, TestDecorator testDecorator, SourcePathResolver sourcePathResolver, Codeowners codeowners, MethodLinesResolver methodLinesResolver, - InetSocketAddress signalServerAddress) { + CoverageProbeStoreFactory coverageProbeStoreFactory, + Consumer onSpanFinish) { + this.sessionId = sessionId; this.moduleName = moduleName; this.config = config; this.testDecorator = testDecorator; this.sourcePathResolver = sourcePathResolver; this.codeowners = codeowners; this.methodLinesResolver = methodLinesResolver; - this.signalServerAddress = signalServerAddress; + this.coverageProbeStoreFactory = coverageProbeStoreFactory; + this.onSpanFinish = onSpanFinish; + + if (startTime != null) { + span = startSpan(testDecorator.component() + ".test_module", sessionSpanContext, startTime); + } else { + span = startSpan(testDecorator.component() + ".test_module", sessionSpanContext); + } + + span.setSpanType(InternalSpanTypes.TEST_MODULE_END); + span.setTag(Tags.SPAN_KIND, Tags.SPAN_KIND_TEST_MODULE); + + span.setResourceName(moduleName); + span.setTag(Tags.TEST_MODULE, moduleName); + + span.setTag(Tags.TEST_MODULE_ID, span.getSpanId()); + span.setTag(Tags.TEST_SESSION_ID, sessionId); + + // setting status to skip initially, + // as we do not know in advance whether the module will have any children + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); + + testDecorator.afterStart(span); + } + + @Override + public void setTag(String key, Object value) { + span.setTag(key, value); + } + + @Override + public void setErrorInfo(Throwable error) { + span.setError(true); + span.addThrowable(error); + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_FAIL); } - protected abstract TestContext getContext(); + @Override + public void setSkipReason(String skipReason) { + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); + if (skipReason != null) { + span.setTag(Tags.TEST_SKIP_REASON, skipReason); + } + } + + @Override + public void end(@Nullable Long endTime) { + 
onSpanFinish.accept(span); + + if (endTime != null) { + span.finish(endTime); + } else { + span.finish(); + } + } @Override public DDTestSuiteImpl testSuiteStart( @@ -54,55 +112,20 @@ public DDTestSuiteImpl testSuiteStart( @Nullable Long startTime, boolean parallelized) { return new DDTestSuiteImpl( - getContext(), + span.context(), + sessionId, + span.getSpanId(), moduleName, testSuiteName, testClass, startTime, + parallelized, config, testDecorator, sourcePathResolver, codeowners, methodLinesResolver, - parallelized); - } - - @Override - public boolean skip(SkippableTest test) { - if (test == null) { - return false; - } - - if (skippableTests == null) { - synchronized (skippableTestsInitLock) { - if (skippableTests == null) { - skippableTests = fetchSkippableTests(); - } - } - } - - if (skippableTests.contains(test)) { - testsSkipped.increment(); - return true; - } else { - return false; - } - } - - protected abstract Collection fetchSkippableTests(); - - public BuildEventsHandler.ModuleInfo getModuleInfo() { - TestContext context = getContext(); - Long moduleId = context.getId(); - Long sessionId = context.getParentId(); - String signalServerHost = - signalServerAddress != null ? signalServerAddress.getHostName() : null; - int signalServerPort = signalServerAddress != null ? signalServerAddress.getPort() : 0; - return new BuildEventsHandler.ModuleInfo( - moduleId, sessionId, signalServerHost, signalServerPort); - } - - public long getId() { - return getContext().getId(); + coverageProbeStoreFactory, + SpanUtils.propagateCiVisibilityTagsTo(span)); } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleParent.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleParent.java deleted file mode 100644 index b82d63bcf29..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestModuleParent.java +++ /dev/null @@ -1,180 +0,0 @@ -package datadog.trace.civisibility; - -import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; - -import datadog.trace.api.Config; -import datadog.trace.api.DDTags; -import datadog.trace.api.civisibility.CIConstants; -import datadog.trace.api.civisibility.config.JvmInfo; -import datadog.trace.api.civisibility.config.ModuleExecutionSettings; -import datadog.trace.api.civisibility.config.SkippableTest; -import datadog.trace.api.civisibility.source.SourcePathResolver; -import datadog.trace.api.config.CiVisibilityConfig; -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; -import datadog.trace.bootstrap.instrumentation.api.Tags; -import datadog.trace.civisibility.codeowners.Codeowners; -import datadog.trace.civisibility.config.ModuleExecutionSettingsFactory; -import datadog.trace.civisibility.context.SpanTestContext; -import datadog.trace.civisibility.context.TestContext; -import datadog.trace.civisibility.decorator.TestDecorator; -import datadog.trace.civisibility.ipc.ModuleExecutionResult; -import datadog.trace.civisibility.source.MethodLinesResolver; -import java.net.InetSocketAddress; -import java.util.Collection; -import java.util.HashSet; -import java.util.Map; -import javax.annotation.Nullable; - -/** - * Representation of a test module in a parent process: - * - *

- * <ul>
- *   <li>JVM that runs the build system, if build system is instrumented
- *   <li>JVM that runs the tests, if build system is not instrumented
- * </ul>
- */ -public class DDTestModuleParent extends DDTestModuleImpl { - - private final AgentSpan span; - private final SpanTestContext context; - private final TestContext sessionContext; - private final TestModuleRegistry testModuleRegistry; - private final ModuleExecutionSettingsFactory moduleExecutionSettingsFactory; - private volatile boolean codeCoverageEnabled; - private volatile boolean itrEnabled; - - public DDTestModuleParent( - TestContext sessionContext, - String moduleName, - @Nullable Long startTime, - Config config, - TestModuleRegistry testModuleRegistry, - TestDecorator testDecorator, - SourcePathResolver sourcePathResolver, - Codeowners codeowners, - MethodLinesResolver methodLinesResolver, - ModuleExecutionSettingsFactory moduleExecutionSettingsFactory, - @Nullable InetSocketAddress signalServerAddress) { - super( - moduleName, - config, - testDecorator, - sourcePathResolver, - codeowners, - methodLinesResolver, - signalServerAddress); - this.sessionContext = sessionContext; - this.testModuleRegistry = testModuleRegistry; - this.moduleExecutionSettingsFactory = moduleExecutionSettingsFactory; - - AgentSpan sessionSpan = sessionContext.getSpan(); - AgentSpan.Context sessionSpanContext = sessionSpan != null ? sessionSpan.context() : null; - - if (startTime != null) { - span = startSpan(testDecorator.component() + ".test_module", sessionSpanContext, startTime); - } else { - span = startSpan(testDecorator.component() + ".test_module", sessionSpanContext); - } - - context = new SpanTestContext(span, sessionContext); - - span.setSpanType(InternalSpanTypes.TEST_MODULE_END); - span.setTag(Tags.SPAN_KIND, Tags.SPAN_KIND_TEST_MODULE); - - span.setResourceName(moduleName); - span.setTag(Tags.TEST_MODULE, moduleName); - - span.setTag(Tags.TEST_MODULE_ID, context.getId()); - span.setTag(Tags.TEST_SESSION_ID, sessionContext.getId()); - - testDecorator.afterStart(span); - } - - @Override - protected SpanTestContext getContext() { - return context; - } - - @Override - public void setTag(String key, Object value) { - span.setTag(key, value); - } - - @Override - public void setErrorInfo(Throwable error) { - span.setError(true); - span.addThrowable(error); - span.setTag(Tags.TEST_STATUS, CIConstants.TEST_FAIL); - } - - @Override - public void setSkipReason(String skipReason) { - span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); - if (skipReason != null) { - span.setTag(Tags.TEST_SKIP_REASON, skipReason); - } - } - - @Override - public void end(@Nullable Long endTime) { - testModuleRegistry.removeModule(this); - - span.setTag(Tags.TEST_STATUS, context.getStatus()); - sessionContext.reportChildStatus(context.getStatus()); - - if (codeCoverageEnabled) { - setTag(Tags.TEST_CODE_COVERAGE_ENABLED, true); - } - if (itrEnabled) { - setTag(Tags.TEST_ITR_TESTS_SKIPPING_ENABLED, true); - setTag(Tags.TEST_ITR_TESTS_SKIPPING_TYPE, "test"); - - long testsSkippedTotal = testsSkipped.sum(); - setTag(Tags.TEST_ITR_TESTS_SKIPPING_COUNT, testsSkippedTotal); - if (testsSkippedTotal > 0) { - setTag(DDTags.CI_ITR_TESTS_SKIPPED, true); - } - } - - testDecorator.beforeFinish(span); - - if (endTime != null) { - span.finish(endTime); - } else { - span.finish(); - } - } - - @Override - protected Collection fetchSkippableTests() { - ModuleExecutionSettings moduleExecutionSettings = - moduleExecutionSettingsFactory.create(JvmInfo.CURRENT_JVM, moduleName); - Map systemProperties = moduleExecutionSettings.getSystemProperties(); - codeCoverageEnabled = - propertyEnabled(systemProperties, 
CiVisibilityConfig.CIVISIBILITY_CODE_COVERAGE_ENABLED); - itrEnabled = propertyEnabled(systemProperties, CiVisibilityConfig.CIVISIBILITY_ITR_ENABLED); - return new HashSet<>(moduleExecutionSettings.getSkippableTests(moduleName)); - } - - private boolean propertyEnabled(Map systemProperties, String propertyName) { - String property = systemProperties.get(propertyName); - return Boolean.parseBoolean(property); - } - - public void onModuleExecutionResultReceived(ModuleExecutionResult result) { - codeCoverageEnabled = result.isCoverageEnabled(); - itrEnabled = result.isItrEnabled(); - testsSkipped.add(result.getTestsSkippedTotal()); - - String testFramework = result.getTestFramework(); - if (testFramework != null) { - span.setTag(Tags.TEST_FRAMEWORK, testFramework); - } - - String testFrameworkVersion = result.getTestFrameworkVersion(); - if (testFrameworkVersion != null) { - span.setTag(Tags.TEST_FRAMEWORK_VERSION, testFrameworkVersion); - } - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionImpl.java index ca0a2558342..874234b86b6 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSessionImpl.java @@ -1,20 +1,115 @@ package datadog.trace.civisibility; -import datadog.trace.api.civisibility.CIVisibility; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; + +import datadog.trace.api.Config; +import datadog.trace.api.civisibility.CIConstants; import datadog.trace.api.civisibility.DDTestSession; -import datadog.trace.api.civisibility.config.JvmInfo; -import datadog.trace.api.civisibility.config.ModuleExecutionSettings; -import java.nio.file.Path; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.civisibility.codeowners.Codeowners; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; +import datadog.trace.civisibility.decorator.TestDecorator; +import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.utils.SpanUtils; import javax.annotation.Nullable; -public abstract class DDTestSessionImpl implements DDTestSession { +public class DDTestSessionImpl implements DDTestSession { + protected final AgentSpan span; + protected final Config config; + protected final TestDecorator testDecorator; + protected final SourcePathResolver sourcePathResolver; + protected final Codeowners codeowners; + protected final MethodLinesResolver methodLinesResolver; + protected final CoverageProbeStoreFactory coverageProbeStoreFactory; + + public DDTestSessionImpl( + String projectName, + @Nullable Long startTime, + Config config, + TestDecorator testDecorator, + SourcePathResolver sourcePathResolver, + Codeowners codeowners, + MethodLinesResolver methodLinesResolver, + CoverageProbeStoreFactory coverageProbeStoreFactory) { + this.config = config; + this.testDecorator = testDecorator; + this.sourcePathResolver = sourcePathResolver; + this.codeowners = codeowners; + this.methodLinesResolver = methodLinesResolver; + this.coverageProbeStoreFactory = coverageProbeStoreFactory; + + if (startTime != null) { + span = 
startSpan(testDecorator.component() + ".test_session", startTime); + } else { + span = startSpan(testDecorator.component() + ".test_session"); + } + + span.setSpanType(InternalSpanTypes.TEST_SESSION_END); + span.setTag(Tags.SPAN_KIND, Tags.SPAN_KIND_TEST_SESSION); + span.setTag(Tags.TEST_SESSION_ID, span.getSpanId()); - public abstract DDTestModuleImpl testModuleStart(String moduleName, @Nullable Long startTime); + // setting status to skip initially, + // as we do not know in advance whether the session will have any children + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); + + span.setResourceName(projectName); + + // The backend requires all session spans to have the test command tag + // because it is used for session fingerprint calculation. + // We're setting it here to project name as a default that works + // reasonably well (although this is not the real command). + // In those cases where proper command can be determined, + // this tag will be overridden + span.setTag(Tags.TEST_COMMAND, projectName); + + testDecorator.afterStart(span); + } - public abstract ModuleExecutionSettings getModuleExecutionSettings(JvmInfo jvmInfo); + @Override + public void setTag(String key, Object value) { + span.setTag(key, value); + } + + @Override + public void setErrorInfo(Throwable error) { + span.setError(true); + span.addThrowable(error); + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_FAIL); + } + + @Override + public void setSkipReason(String skipReason) { + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); + if (skipReason != null) { + span.setTag(Tags.TEST_SKIP_REASON, skipReason); + } + } + + @Override + public void end(@Nullable Long endTime) { + if (endTime != null) { + span.finish(endTime); + } else { + span.finish(); + } + } - public interface SessionImplFactory extends CIVisibility.SessionFactory { - DDTestSessionImpl startSession( - String projectName, Path projectRoot, String component, Long startTime); + @Override + public DDTestModuleImpl testModuleStart(String moduleName, @Nullable Long startTime) { + return new DDTestModuleImpl( + span.context(), + span.getSpanId(), + moduleName, + startTime, + config, + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory, + SpanUtils.propagateCiVisibilityTagsTo(span)); } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSuiteImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSuiteImpl.java index d0d693d0a16..b170d976dfb 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSuiteImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/DDTestSuiteImpl.java @@ -6,59 +6,66 @@ import datadog.trace.api.Config; import datadog.trace.api.civisibility.CIConstants; import datadog.trace.api.civisibility.DDTestSuite; -import datadog.trace.api.civisibility.source.SourcePathResolver; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.civisibility.codeowners.Codeowners; -import datadog.trace.civisibility.context.SpanTestContext; -import datadog.trace.civisibility.context.TestContext; +import datadog.trace.civisibility.coverage.CoverageProbeStoreFactory; import 
datadog.trace.civisibility.decorator.TestDecorator; import datadog.trace.civisibility.source.MethodLinesResolver; +import datadog.trace.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.utils.SpanUtils; import java.lang.reflect.Method; +import java.util.function.Consumer; import javax.annotation.Nullable; public class DDTestSuiteImpl implements DDTestSuite { + private final AgentSpan span; + private final long sessionId; + private final long moduleId; private final String moduleName; private final String testSuiteName; private final Class testClass; - private final AgentSpan span; - private final TestContext context; - private final TestContext moduleContext; private final Config config; private final TestDecorator testDecorator; private final SourcePathResolver sourcePathResolver; private final Codeowners codeowners; private final MethodLinesResolver methodLinesResolver; + private final CoverageProbeStoreFactory coverageProbeStoreFactory; private final boolean parallelized; + private final Consumer onSpanFinish; public DDTestSuiteImpl( - TestContext moduleContext, + @Nullable AgentSpan.Context moduleSpanContext, + long sessionId, + long moduleId, String moduleName, String testSuiteName, @Nullable Class testClass, @Nullable Long startTime, + boolean parallelized, Config config, TestDecorator testDecorator, SourcePathResolver sourcePathResolver, Codeowners codeowners, MethodLinesResolver methodLinesResolver, - boolean parallelized) { + CoverageProbeStoreFactory coverageProbeStoreFactory, + Consumer onSpanFinish) { + this.sessionId = sessionId; + this.moduleId = moduleId; this.moduleName = moduleName; - this.moduleContext = moduleContext; this.testSuiteName = testSuiteName; + this.parallelized = parallelized; this.config = config; this.testDecorator = testDecorator; this.sourcePathResolver = sourcePathResolver; this.codeowners = codeowners; this.methodLinesResolver = methodLinesResolver; - this.parallelized = parallelized; - - AgentSpan moduleSpan = this.moduleContext.getSpan(); - AgentSpan.Context moduleSpanContext = moduleSpan != null ? 
moduleSpan.context() : null; + this.coverageProbeStoreFactory = coverageProbeStoreFactory; + this.onSpanFinish = onSpanFinish; if (startTime != null) { span = startSpan(testDecorator.component() + ".test_suite", moduleSpanContext, startTime); @@ -66,8 +73,6 @@ public DDTestSuiteImpl( span = startSpan(testDecorator.component() + ".test_suite", moduleSpanContext); } - context = new SpanTestContext(span, moduleContext); - span.setSpanType(InternalSpanTypes.TEST_SUITE_END); span.setTag(Tags.SPAN_KIND, Tags.SPAN_KIND_TEST_SUITE); @@ -75,9 +80,13 @@ public DDTestSuiteImpl( span.setTag(Tags.TEST_SUITE, testSuiteName); span.setTag(Tags.TEST_MODULE, moduleName); - span.setTag(Tags.TEST_SUITE_ID, context.getId()); - span.setTag(Tags.TEST_MODULE_ID, moduleContext.getId()); - span.setTag(Tags.TEST_SESSION_ID, moduleContext.getParentId()); + span.setTag(Tags.TEST_SUITE_ID, span.getSpanId()); + span.setTag(Tags.TEST_MODULE_ID, moduleId); + span.setTag(Tags.TEST_SESSION_ID, sessionId); + + // setting status to skip initially, + // as we do not know in advance whether the suite will have any children + span.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); this.testClass = testClass; if (this.testClass != null) { @@ -139,52 +148,34 @@ public void end(@Nullable Long endTime) { scope.close(); } - testDecorator.beforeFinish(span); + onSpanFinish.accept(span); - String status = context.getStatus(); - if (status != null) { - // do not report test suite if no execution took place - span.setTag(Tags.TEST_STATUS, status); - moduleContext.reportChildStatus(status); - - if (endTime != null) { - span.finish(endTime); - } else { - span.finish(); - } + if (endTime != null) { + span.finish(endTime); + } else { + span.finish(); } - - moduleContext.reportChildTag(Tags.TEST_FRAMEWORK, span.getTag(Tags.TEST_FRAMEWORK)); - moduleContext.reportChildTag( - Tags.TEST_FRAMEWORK_VERSION, span.getTag(Tags.TEST_FRAMEWORK_VERSION)); } @Override public DDTestImpl testStart( String testName, @Nullable Method testMethod, @Nullable Long startTime) { - return testStart( - testName, testMethod != null ? 
testMethod.getName() : null, testMethod, startTime); - } - - public DDTestImpl testStart( - String testName, - @Nullable String methodName, - @Nullable Method testMethod, - @Nullable Long startTime) { return new DDTestImpl( - context, - moduleContext, + sessionId, + moduleId, + span.getSpanId(), moduleName, testSuiteName, testName, startTime, testClass, - methodName, testMethod, config, testDecorator, sourcePathResolver, methodLinesResolver, - codeowners); + codeowners, + coverageProbeStoreFactory, + SpanUtils.propagateCiVisibilityTagsTo(span)); } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/TestModuleRegistry.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/TestModuleRegistry.java index f9876b6900d..a609c93bddf 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/TestModuleRegistry.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/TestModuleRegistry.java @@ -13,23 +13,23 @@ public class TestModuleRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(TestModuleRegistry.class); - private final Map testModuleById; + private final Map testModuleById; public TestModuleRegistry() { this.testModuleById = new ConcurrentHashMap<>(); } - public void addModule(DDTestModuleParent module) { + public void addModule(DDBuildSystemModule module) { testModuleById.put(module.getId(), module); } - public void removeModule(DDTestModuleParent module) { + public void removeModule(DDBuildSystemModule module) { testModuleById.remove(module.getId()); } public SignalResponse onModuleExecutionResultReceived(ModuleExecutionResult result) { long moduleId = result.getModuleId(); - DDTestModuleParent module = testModuleById.get(moduleId); + DDBuildSystemModule module = testModuleById.get(moduleId); if (module == null) { String message = String.format( @@ -38,6 +38,7 @@ public SignalResponse onModuleExecutionResultReceived(ModuleExecutionResult resu LOGGER.warn(message); return new ErrorResponse(message); } + module.onModuleExecutionResultReceived(result); return AckResponse.INSTANCE; } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AppVeyorInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AppVeyorInfo.java index 5170d93337f..5b98050f7a2 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AppVeyorInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AppVeyorInfo.java @@ -2,7 +2,7 @@ import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AwsCodePipelineInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AwsCodePipelineInfo.java new file mode 100644 index 00000000000..4a0a9d299e6 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AwsCodePipelineInfo.java @@ -0,0 +1,29 @@ +package datadog.trace.civisibility.ci; + +import datadog.trace.api.git.GitInfo; + +class AwsCodePipelineInfo implements CIProviderInfo { + + public static final String 
AWS_CODEPIPELINE = "CODEBUILD_INITIATOR"; + public static final String AWS_CODEPIPELINE_PROVIDER_NAME = "awscodepipeline"; + public static final String AWS_CODEPIPELINE_EXECUTION_ID = "DD_PIPELINE_EXECUTION_ID"; + public static final String AWS_CODEPIPELINE_ACTION_EXECUTION_ID = "DD_ACTION_EXECUTION_ID"; + public static final String AWS_CODEPIPELINE_ARN = "CODEBUILD_BUILD_ARN"; + + @Override + public GitInfo buildCIGitInfo() { + return GitInfo.NOOP; + } + + @Override + public CIInfo buildCIInfo() { + return CIInfo.builder() + .ciProviderName(AWS_CODEPIPELINE_PROVIDER_NAME) + .ciPipelineId(System.getenv(AWS_CODEPIPELINE_EXECUTION_ID)) + .ciEnvVars( + AWS_CODEPIPELINE_EXECUTION_ID, + AWS_CODEPIPELINE_ACTION_EXECUTION_ID, + AWS_CODEPIPELINE_ARN) + .build(); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AzurePipelinesInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AzurePipelinesInfo.java index d24634e0d36..7e2a454411a 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AzurePipelinesInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/AzurePipelinesInfo.java @@ -4,7 +4,7 @@ import static datadog.trace.api.git.GitUtils.isTagReference; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitBucketInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitBucketInfo.java index 9a8737ddc9a..544ea1f951c 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitBucketInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitBucketInfo.java @@ -3,7 +3,7 @@ import static datadog.trace.api.git.GitUtils.filterSensitiveInfo; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; @@ -19,6 +19,7 @@ class BitBucketInfo implements CIProviderInfo { public static final String BITBUCKET_BUILD_NUMBER = "BITBUCKET_BUILD_NUMBER"; public static final String BITBUCKET_WORKSPACE_PATH = "BITBUCKET_CLONE_DIR"; public static final String BITBUCKET_GIT_REPOSITORY_URL = "BITBUCKET_GIT_SSH_ORIGIN"; + public static final String BITBUCKET_HTTPS_REPOSITORY_URL = "BITBUCKET_GIT_HTTP_ORIGIN"; public static final String BITBUCKET_GIT_COMMIT = "BITBUCKET_COMMIT"; public static final String BITBUCKET_GIT_BRANCH = "BITBUCKET_BRANCH"; public static final String BITBUCKET_GIT_TAG = "BITBUCKET_TAG"; @@ -26,12 +27,24 @@ class BitBucketInfo implements CIProviderInfo { @Override public GitInfo buildCIGitInfo() { return new GitInfo( - filterSensitiveInfo(System.getenv(BITBUCKET_GIT_REPOSITORY_URL)), + getRepositoryURL(), normalizeBranch(System.getenv(BITBUCKET_GIT_BRANCH)), normalizeTag(System.getenv(BITBUCKET_GIT_TAG)), new CommitInfo(System.getenv(BITBUCKET_GIT_COMMIT))); } + private static String 
getRepositoryURL() { + String gitRepoUrl = System.getenv(BITBUCKET_GIT_REPOSITORY_URL); + if (Strings.isNotBlank(gitRepoUrl)) { + return filterSensitiveInfo(gitRepoUrl); + } + String httpsRepoUrl = System.getenv(BITBUCKET_HTTPS_REPOSITORY_URL); + if (Strings.isNotBlank(httpsRepoUrl)) { + return filterSensitiveInfo(httpsRepoUrl); + } + return null; + } + @Override public CIInfo buildCIInfo() { final String repo = System.getenv(BITBUCKET_REPO_FULL_NAME); diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitriseInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitriseInfo.java index 7e46258e057..230ac389db1 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitriseInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BitriseInfo.java @@ -3,7 +3,7 @@ import static datadog.trace.api.git.GitUtils.filterSensitiveInfo; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BuildkiteInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BuildkiteInfo.java index 748d4805773..86bd3b5d5d4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BuildkiteInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/BuildkiteInfo.java @@ -3,7 +3,7 @@ import static datadog.trace.api.git.GitUtils.filterSensitiveInfo; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import static datadog.trace.util.Strings.toJson; import datadog.trace.api.git.CommitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CIProviderInfoFactory.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CIProviderInfoFactory.java index f8356d57a02..37a651678b7 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CIProviderInfoFactory.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CIProviderInfoFactory.java @@ -51,6 +51,9 @@ public CIProviderInfo createCIProviderInfo(Path currentPath) { return new CodefreshInfo(); } else if (System.getenv(TeamcityInfo.TEAMCITY) != null) { return new TeamcityInfo(); + } else if (System.getenv(AwsCodePipelineInfo.AWS_CODEPIPELINE) != null + && System.getenv(AwsCodePipelineInfo.AWS_CODEPIPELINE).startsWith("codepipeline")) { + return new AwsCodePipelineInfo(); } else { return new UnknownCIInfo(targetFolder, currentPath); } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CircleCIInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CircleCIInfo.java index d009db0df52..cf3234ac9ae 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CircleCIInfo.java +++ 
b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/CircleCIInfo.java @@ -3,7 +3,7 @@ import static datadog.trace.api.git.GitUtils.filterSensitiveInfo; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GitLabInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GitLabInfo.java index d7cbdcce657..40a69ac5a56 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GitLabInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GitLabInfo.java @@ -3,7 +3,7 @@ import static datadog.trace.api.git.GitUtils.filterSensitiveInfo; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GithubActionsInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GithubActionsInfo.java index 945ecdf3059..2dabe00a5e4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GithubActionsInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/GithubActionsInfo.java @@ -3,7 +3,7 @@ import static datadog.trace.api.git.GitUtils.isTagReference; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/JenkinsInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/JenkinsInfo.java index f2d1ee5c0ee..9d0aea215fe 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/JenkinsInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/JenkinsInfo.java @@ -4,7 +4,7 @@ import static datadog.trace.api.git.GitUtils.isTagReference; import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/TravisInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/TravisInfo.java index 9c9b4af0e8e..3d311e5ea85 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/TravisInfo.java +++ 
b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/TravisInfo.java @@ -2,7 +2,7 @@ import static datadog.trace.api.git.GitUtils.normalizeBranch; import static datadog.trace.api.git.GitUtils.normalizeTag; -import static datadog.trace.civisibility.utils.PathUtils.expandTilde; +import static datadog.trace.civisibility.utils.FileUtils.expandTilde; import datadog.trace.api.git.CommitInfo; import datadog.trace.api.git.GitInfo; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/UnknownCIInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/UnknownCIInfo.java index b30bdfd1e47..1e16adcad00 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/UnknownCIInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ci/UnknownCIInfo.java @@ -1,6 +1,6 @@ package datadog.trace.civisibility.ci; -import static datadog.trace.civisibility.utils.CIUtils.findParentPathBackwards; +import static datadog.trace.civisibility.utils.FileUtils.findParentPathBackwards; import datadog.trace.api.git.GitInfo; import java.nio.file.Path; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/BackendApiFactory.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/BackendApiFactory.java index 8f79c5b0a3a..c79fb8d65ab 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/BackendApiFactory.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/BackendApiFactory.java @@ -33,12 +33,7 @@ public BackendApiFactory(Config config, SharedCommunicationObjects sharedCommuni throw new FatalAgentMisconfigurationError( "Agentless mode is enabled and api key is not set. Please set application key"); } - String applicationKey = config.getApplicationKey(); - if (applicationKey == null || applicationKey.isEmpty()) { - log.warn( - "Agentless mode is enabled and application key is not set. 
Some CI Visibility features will be unavailable"); - } - return new IntakeApi(site, apiKey, applicationKey, timeoutMillis, retryPolicyFactory); + return new IntakeApi(site, apiKey, timeoutMillis, retryPolicyFactory); } DDAgentFeaturesDiscovery featuresDiscovery = diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/IntakeApi.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/IntakeApi.java index d40a50b78b4..d8617376b9c 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/IntakeApi.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/communication/IntakeApi.java @@ -20,22 +20,15 @@ public class IntakeApi implements BackendApi { private static final String API_VERSION = "v2"; private static final String DD_API_KEY_HEADER = "dd-api-key"; - private static final String DD_APPLICATION_KEY_HEADER = "dd-application-key"; private final String apiKey; - private final String applicationKey; private final HttpRetryPolicy.Factory retryPolicyFactory; private final HttpUrl hostUrl; private final OkHttpClient httpClient; public IntakeApi( - String site, - String apiKey, - String applicationKey, - long timeoutMillis, - HttpRetryPolicy.Factory retryPolicyFactory) { + String site, String apiKey, long timeoutMillis, HttpRetryPolicy.Factory retryPolicyFactory) { this.apiKey = apiKey; - this.applicationKey = applicationKey; this.retryPolicyFactory = retryPolicyFactory; final String ciVisibilityAgentlessUrlStr = Config.get().getCiVisibilityAgentlessUrl(); @@ -56,10 +49,6 @@ public T post( Request.Builder requestBuilder = new Request.Builder().url(url).post(requestBody).addHeader(DD_API_KEY_HEADER, apiKey); - if (applicationKey != null) { - requestBuilder.addHeader(DD_APPLICATION_KEY_HEADER, applicationKey); - } - Request request = requestBuilder.build(); HttpRetryPolicy retryPolicy = retryPolicyFactory.create(); try (okhttp3.Response response = diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/CachingModuleExecutionSettingsFactory.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/CachingModuleExecutionSettingsFactory.java index de79f4e3fd6..85eb5f24b97 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/CachingModuleExecutionSettingsFactory.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/CachingModuleExecutionSettingsFactory.java @@ -3,7 +3,6 @@ import datadog.trace.api.Config; import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.api.civisibility.config.ModuleExecutionSettings; import java.util.Objects; import javax.annotation.Nullable; diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/config/JvmInfo.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfo.java similarity index 96% rename from internal-api/src/main/java/datadog/trace/api/civisibility/config/JvmInfo.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfo.java index 0736a6d0be2..0f7ca5ca7fd 100644 --- a/internal-api/src/main/java/datadog/trace/api/civisibility/config/JvmInfo.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfo.java @@ -1,4 +1,4 @@ 
-package datadog.trace.api.civisibility.config; +package datadog.trace.civisibility.config; import java.util.Objects; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfoFactory.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfoFactory.java index 88acf3f1227..6a62ce3927d 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfoFactory.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/JvmInfoFactory.java @@ -1,6 +1,5 @@ package datadog.trace.civisibility.config; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.civisibility.utils.ShellCommandExecutor; import datadog.trace.util.ProcessUtils; import java.io.BufferedReader; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactory.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactory.java index a29e9195124..829c9d34e14 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactory.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactory.java @@ -1,6 +1,5 @@ package datadog.trace.civisibility.config; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.api.civisibility.config.ModuleExecutionSettings; import javax.annotation.Nullable; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactoryImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactoryImpl.java index 4e91447b2c6..6122ac1aabe 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactoryImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/config/ModuleExecutionSettingsFactoryImpl.java @@ -2,19 +2,22 @@ import datadog.trace.api.Config; import datadog.trace.api.civisibility.config.Configurations; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.api.civisibility.config.ModuleExecutionSettings; import datadog.trace.api.civisibility.config.SkippableTest; import datadog.trace.api.config.CiVisibilityConfig; import datadog.trace.api.git.GitInfo; import datadog.trace.api.git.GitInfoProvider; import datadog.trace.civisibility.git.tree.GitDataUploader; +import datadog.trace.civisibility.source.index.RepoIndex; +import datadog.trace.civisibility.source.index.RepoIndexProvider; import datadog.trace.util.Strings; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; @@ -32,16 +35,19 @@ public class ModuleExecutionSettingsFactoryImpl implements ModuleExecutionSettin private final Config config; private final ConfigurationApi configurationApi; private final GitDataUploader gitDataUploader; + private final RepoIndexProvider repoIndexProvider; private final String repositoryRoot; public ModuleExecutionSettingsFactoryImpl( Config config, ConfigurationApi configurationApi, GitDataUploader gitDataUploader, + RepoIndexProvider repoIndexProvider, 
String repositoryRoot) { this.config = config; this.configurationApi = configurationApi; this.gitDataUploader = gitDataUploader; + this.repoIndexProvider = repoIndexProvider; this.repositoryRoot = repositoryRoot; } @@ -68,7 +74,13 @@ public ModuleExecutionSettings create(JvmInfo jvmInfo, @Nullable String moduleNa ? getSkippableTestsByModulePath(Paths.get(repositoryRoot), tracerEnvironment) : Collections.emptyMap(); - return new ModuleExecutionSettings(systemProperties, skippableTestsByModulePath); + List coverageEnabledPackages = getCoverageEnabledPackages(codeCoverageEnabled); + return new ModuleExecutionSettings( + codeCoverageEnabled, + itrEnabled, + systemProperties, + skippableTestsByModulePath, + coverageEnabledPackages); } private TracerEnvironment buildTracerEnvironment( @@ -181,4 +193,46 @@ private Map> getSkippableTestsByModulePath( return Collections.emptyMap(); } } + + private List getCoverageEnabledPackages(boolean codeCoverageEnabled) { + if (!codeCoverageEnabled) { + return Collections.emptyList(); + } + + List includedPackages = config.getCiVisibilityJacocoPluginIncludes(); + if (includedPackages != null && !includedPackages.isEmpty()) { + return includedPackages; + } + + RepoIndex repoIndex = repoIndexProvider.getIndex(); + List packages = new ArrayList<>(repoIndex.getRootPackages()); + List excludedPackages = config.getCiVisibilityJacocoPluginExcludes(); + if (excludedPackages != null && !excludedPackages.isEmpty()) { + removeMatchingPackages(packages, excludedPackages); + } + return packages; + } + + private static void removeMatchingPackages(List packages, List excludedPackages) { + List excludedPrefixes = + excludedPackages.stream() + .map(ModuleExecutionSettingsFactoryImpl::trimTrailingAsterisk) + .collect(Collectors.toList()); + + Iterator packagesIterator = packages.iterator(); + while (packagesIterator.hasNext()) { + String p = packagesIterator.next(); + + for (String excludedPrefix : excludedPrefixes) { + if (p.startsWith(excludedPrefix)) { + packagesIterator.remove(); + break; + } + } + } + } + + private static String trimTrailingAsterisk(String s) { + return s.endsWith("*") ? 
s.substring(0, s.length() - 1) : s; + } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/AbstractTestContext.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/AbstractTestContext.java deleted file mode 100644 index 3e4ecad02f9..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/AbstractTestContext.java +++ /dev/null @@ -1,37 +0,0 @@ -package datadog.trace.civisibility.context; - -import datadog.trace.api.civisibility.CIConstants; - -abstract class AbstractTestContext implements TestContext { - - private String status; - - @Override - public synchronized void reportChildStatus(String childStatus) { - if (childStatus == null) { - return; - } - switch (childStatus) { - case CIConstants.TEST_PASS: - if (status == null || CIConstants.TEST_SKIP.equals(status)) { - status = CIConstants.TEST_PASS; - } - break; - case CIConstants.TEST_FAIL: - status = CIConstants.TEST_FAIL; - break; - case CIConstants.TEST_SKIP: - if (status == null) { - status = CIConstants.TEST_SKIP; - } - break; - default: - break; - } - } - - @Override - public synchronized String getStatus() { - return status; - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/EmptyTestContext.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/EmptyTestContext.java deleted file mode 100644 index e4e9e2595ed..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/EmptyTestContext.java +++ /dev/null @@ -1,39 +0,0 @@ -package datadog.trace.civisibility.context; - -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import javax.annotation.Nullable; - -public final class EmptyTestContext implements TestContext { - - public static final TestContext INSTANCE = new EmptyTestContext(); - - private EmptyTestContext() {} - - @Override - public Long getId() { - return null; - } - - @Nullable - @Override - public Long getParentId() { - return null; - } - - @Override - public void reportChildStatus(String status) {} - - @Override - public String getStatus() { - return null; - } - - @Override - public void reportChildTag(String key, Object value) {} - - @Nullable - @Override - public AgentSpan getSpan() { - return null; - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/ParentProcessTestContext.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/ParentProcessTestContext.java deleted file mode 100644 index bd1e4f14eae..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/ParentProcessTestContext.java +++ /dev/null @@ -1,66 +0,0 @@ -package datadog.trace.civisibility.context; - -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.Tags; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -public class ParentProcessTestContext extends AbstractTestContext implements TestContext { - - private volatile String testFramework; - private volatile String testFrameworkVersion; - private final long sessionId; - private final long moduleId; - - public ParentProcessTestContext(long sessionId, long moduleId) { - this.sessionId = sessionId; - this.moduleId = moduleId; - } - - @Override - public Long getId() { - return moduleId; - } - - @Nonnull - @Override - public Long 
getParentId() { - return sessionId; - } - - @Nullable - @Override - public AgentSpan getSpan() { - return null; - } - - @Override - public void reportChildTag(String key, Object value) { - // the method leaves room for a - // proper implementation using a thread-safe map, - // but for now it's just this, - // to save some performance costs - switch (key) { - case Tags.TEST_FRAMEWORK: - testFramework = String.valueOf(value); - break; - case Tags.TEST_FRAMEWORK_VERSION: - testFrameworkVersion = String.valueOf(value); - break; - default: - throw new IllegalArgumentException("Unexpected child tag reported: " + key); - } - } - - @Nullable - public Object getChildTag(String key) { - switch (key) { - case Tags.TEST_FRAMEWORK: - return testFramework; - case Tags.TEST_FRAMEWORK_VERSION: - return testFrameworkVersion; - default: - return null; - } - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/SpanTestContext.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/SpanTestContext.java deleted file mode 100644 index b0fea0acfa3..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/SpanTestContext.java +++ /dev/null @@ -1,50 +0,0 @@ -package datadog.trace.civisibility.context; - -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.Tags; - -public class SpanTestContext extends AbstractTestContext implements TestContext { - private final AgentSpan span; - private final TestContext parent; - - public SpanTestContext(AgentSpan span, TestContext parent) { - this.span = span; - this.parent = parent; - } - - @Override - public Long getId() { - return span.getSpanId(); - } - - @Override - public Long getParentId() { - return parent != null ? parent.getId() : null; - } - - @Override - public String getStatus() { - String status = (String) span.getTag(Tags.TEST_STATUS); - if (status != null) { - // status was set explicitly for container span - // (e.g. 
set up or tear down have failed) - // in this case we ignore children statuses - return status; - } else { - return super.getStatus(); - } - } - - @Override - public AgentSpan getSpan() { - return span; - } - - @Override - public void reportChildTag(String key, Object value) { - span.setTag(key, value); - if (parent != null) { - parent.reportChildTag(key, value); - } - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/TestContext.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/TestContext.java deleted file mode 100644 index 32aa29647d7..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/context/TestContext.java +++ /dev/null @@ -1,21 +0,0 @@ -package datadog.trace.civisibility.context; - -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import javax.annotation.Nullable; - -public interface TestContext { - - Long getId(); - - @Nullable - Long getParentId(); - - void reportChildStatus(String status); - - String getStatus(); - - void reportChildTag(String key, Object value); - - @Nullable - AgentSpan getSpan(); -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/CoverageProbeStoreFactory.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/CoverageProbeStoreFactory.java new file mode 100644 index 00000000000..3c9b4f7fb95 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/CoverageProbeStoreFactory.java @@ -0,0 +1,8 @@ +package datadog.trace.civisibility.coverage; + +import datadog.trace.api.civisibility.coverage.CoverageProbeStore; +import datadog.trace.civisibility.source.SourcePathResolver; + +public interface CoverageProbeStoreFactory extends CoverageProbeStore.Registry { + CoverageProbeStore create(SourcePathResolver sourcePathResolver); +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/CoverageUtils.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/CoverageUtils.java new file mode 100644 index 00000000000..a427811cf4d --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/CoverageUtils.java @@ -0,0 +1,110 @@ +package datadog.trace.civisibility.coverage; + +import datadog.trace.civisibility.source.index.RepoIndex; +import java.io.BufferedInputStream; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.Collections; +import javax.annotation.Nullable; +import org.jacoco.core.analysis.Analyzer; +import org.jacoco.core.analysis.CoverageBuilder; +import org.jacoco.core.analysis.IBundleCoverage; +import org.jacoco.core.data.ExecutionDataReader; +import org.jacoco.core.data.ExecutionDataStore; +import org.jacoco.core.data.SessionInfoStore; +import org.jacoco.report.FileMultiReportOutput; +import org.jacoco.report.IReportVisitor; +import org.jacoco.report.InputStreamSourceFileLocator; +import org.jacoco.report.html.HTMLFormatter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class CoverageUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(CoverageUtils.class); + + public static ExecutionDataStore parse(byte[] rawCoverageData) { + if (rawCoverageData == null) 
{ + return null; + } + + try { + SessionInfoStore sessionInfoStore = new SessionInfoStore(); + ExecutionDataStore executionDataStore = new ExecutionDataStore(); + + ByteArrayInputStream input = new ByteArrayInputStream(rawCoverageData); + ExecutionDataReader dataReader = new ExecutionDataReader(input); + dataReader.setSessionInfoVisitor(sessionInfoStore); + dataReader.setExecutionDataVisitor(executionDataStore); + dataReader.read(); + + return executionDataStore; + } catch (Exception e) { + LOGGER.error("Error while parsing coverage data", e); + return null; + } + } + + @Nullable + public static IBundleCoverage createCoverageBundle( + ExecutionDataStore coverageData, Collection classesDirs) { + try { + CoverageBuilder coverageBuilder = new CoverageBuilder(); + Analyzer analyzer = new Analyzer(coverageData, coverageBuilder); + for (File outputClassesDir : classesDirs) { + if (outputClassesDir.exists()) { + analyzer.analyzeAll(outputClassesDir); + } + } + + return coverageBuilder.getBundle("Module coverage data"); + } catch (Exception e) { + LOGGER.error("Error while creating coverage bundle", e); + return null; + } + } + + public static void dumpCoverageReport( + IBundleCoverage coverageBundle, RepoIndex repoIndex, String repoRoot, File reportFolder) { + if (!reportFolder.exists() && !reportFolder.mkdirs()) { + LOGGER.info("Skipping report generation, could not create report dir: {}", reportFolder); + return; + } + try { + final HTMLFormatter htmlFormatter = new HTMLFormatter(); + final IReportVisitor visitor = + htmlFormatter.createVisitor(new FileMultiReportOutput(reportFolder)); + visitor.visitInfo(Collections.emptyList(), Collections.emptyList()); + visitor.visitBundle(coverageBundle, new RepoIndexFileLocator(repoIndex, repoRoot)); + visitor.visitEnd(); + } catch (Exception e) { + LOGGER.error("Error while creating report in {}", reportFolder, e); + } + } + + private static final class RepoIndexFileLocator extends InputStreamSourceFileLocator { + private final RepoIndex repoIndex; + private final String repoRoot; + + private RepoIndexFileLocator(RepoIndex repoIndex, String repoRoot) { + super("utf-8", 4); + this.repoIndex = repoIndex; + this.repoRoot = repoRoot; + } + + @Override + protected InputStream getSourceStream(String path) throws IOException { + String relativePath = repoIndex.getSourcePath(path); + if (relativePath == null) { + return null; + } + String absolutePath = + repoRoot + (!repoRoot.endsWith(File.separator) ? 
File.separator : "") + relativePath; + return new BufferedInputStream(Files.newInputStream(Paths.get(absolutePath))); + } + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/NoopCoverageProbeStore.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/NoopCoverageProbeStore.java index e2827084f37..111cf8eff35 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/NoopCoverageProbeStore.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/NoopCoverageProbeStore.java @@ -2,7 +2,7 @@ import datadog.trace.api.civisibility.coverage.CoverageProbeStore; import datadog.trace.api.civisibility.coverage.TestReport; -import datadog.trace.api.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import javax.annotation.Nullable; public class NoopCoverageProbeStore implements CoverageProbeStore { @@ -20,7 +20,7 @@ public TestReport getReport() { return null; } - public static final class NoopCoverageProbeStoreFactory implements CoverageProbeStore.Factory { + public static final class NoopCoverageProbeStoreFactory implements CoverageProbeStoreFactory { @Override public void setTotalProbeCount(String className, int totalProbeCount) {} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/SegmentlessTestProbes.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/SegmentlessTestProbes.java index fa3fbeaa6a5..cb133bd1e55 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/SegmentlessTestProbes.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/SegmentlessTestProbes.java @@ -3,7 +3,7 @@ import datadog.trace.api.civisibility.coverage.CoverageProbeStore; import datadog.trace.api.civisibility.coverage.TestReport; import datadog.trace.api.civisibility.coverage.TestReportFileEntry; -import datadog.trace.api.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -56,7 +56,7 @@ public TestReport getReport() { return testReport; } - public static class SegmentlessTestProbesFactory implements Factory { + public static class SegmentlessTestProbesFactory implements CoverageProbeStoreFactory { @Override public void setTotalProbeCount(String className, int totalProbeCount) { diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/TestProbes.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/TestProbes.java index 26da3f25988..d0fa5a731f4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/TestProbes.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/coverage/TestProbes.java @@ -3,7 +3,7 @@ import datadog.trace.api.civisibility.coverage.CoverageProbeStore; import datadog.trace.api.civisibility.coverage.TestReport; import datadog.trace.api.civisibility.coverage.TestReportFileEntry; -import datadog.trace.api.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import datadog.trace.civisibility.source.Utils; import java.io.InputStream; import java.util.ArrayList; @@ -128,7 +128,7 @@ public TestReport 
getReport() { return testReport; } - public static class TestProbesFactory implements CoverageProbeStore.Factory { + public static class TestProbesFactory implements CoverageProbeStoreFactory { @Override public void setTotalProbeCount(String className, int totalProbeCount) { diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecorator.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecorator.java index a8bdeab3407..91f3c5e8101 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecorator.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecorator.java @@ -8,6 +8,4 @@ public interface TestDecorator { AgentSpan afterStart(final AgentSpan span); CharSequence component(); - - AgentSpan beforeFinish(final AgentSpan span); } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecoratorImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecoratorImpl.java index 02daf4f66c6..0337e183d8e 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecoratorImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/decorator/TestDecoratorImpl.java @@ -1,11 +1,11 @@ package datadog.trace.civisibility.decorator; import datadog.trace.api.DDTags; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.api.sampling.PrioritySampling; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; +import datadog.trace.civisibility.config.JvmInfo; import java.util.Map; public class TestDecoratorImpl implements TestDecorator { @@ -84,9 +84,4 @@ public AgentSpan afterStart(final AgentSpan span) { return span; } - - @Override - public AgentSpan beforeFinish(AgentSpan span) { - return span; - } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/BuildEventsHandlerImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/BuildEventsHandlerImpl.java index f775ac8bd76..8cd2afcd20d 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/BuildEventsHandlerImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/BuildEventsHandlerImpl.java @@ -1,31 +1,34 @@ package datadog.trace.civisibility.events; import datadog.trace.api.civisibility.CIConstants; -import datadog.trace.api.civisibility.config.JvmInfo; import datadog.trace.api.civisibility.config.ModuleExecutionSettings; import datadog.trace.api.civisibility.events.BuildEventsHandler; import datadog.trace.bootstrap.instrumentation.api.Tags; -import datadog.trace.civisibility.DDTestModuleImpl; -import datadog.trace.civisibility.DDTestSessionImpl; +import datadog.trace.civisibility.DDBuildSystemModule; +import datadog.trace.civisibility.DDBuildSystemSession; +import datadog.trace.civisibility.config.JvmInfo; import datadog.trace.civisibility.config.JvmInfoFactory; +import java.io.File; import java.nio.file.Path; +import java.util.Collection; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import javax.annotation.Nullable; public class 
BuildEventsHandlerImpl implements BuildEventsHandler { - private final ConcurrentMap inProgressTestSessions = + private final ConcurrentMap inProgressTestSessions = new ConcurrentHashMap<>(); - private final ConcurrentMap, DDTestModuleImpl> inProgressTestModules = + private final ConcurrentMap, DDBuildSystemModule> inProgressTestModules = new ConcurrentHashMap<>(); - private final DDTestSessionImpl.SessionImplFactory sessionFactory; + private final DDBuildSystemSession.Factory sessionFactory; private final JvmInfoFactory jvmInfoFactory; public BuildEventsHandlerImpl( - DDTestSessionImpl.SessionImplFactory sessionFactory, JvmInfoFactory jvmInfoFactory) { + DDBuildSystemSession.Factory sessionFactory, JvmInfoFactory jvmInfoFactory) { this.sessionFactory = sessionFactory; this.jvmInfoFactory = jvmInfoFactory; } @@ -38,21 +41,20 @@ public void onTestSessionStart( final String startCommand, final String buildSystemName, final String buildSystemVersion) { - DDTestSessionImpl testSession = - sessionFactory.startSession(projectName, projectRoot, buildSystemName, null); - testSession.setTag(Tags.TEST_COMMAND, startCommand); + DDBuildSystemSession testSession = + sessionFactory.startSession(projectName, projectRoot, startCommand, buildSystemName, null); testSession.setTag(Tags.TEST_TOOLCHAIN, buildSystemName + ":" + buildSystemVersion); inProgressTestSessions.put(sessionKey, testSession); } @Override public void onTestSessionFail(final T sessionKey, final Throwable throwable) { - DDTestSessionImpl testSession = getTestSession(sessionKey); + DDBuildSystemSession testSession = getTestSession(sessionKey); testSession.setErrorInfo(throwable); } - private DDTestSessionImpl getTestSession(T sessionKey) { - DDTestSessionImpl testSession = inProgressTestSessions.get(sessionKey); + private DDBuildSystemSession getTestSession(T sessionKey) { + DDBuildSystemSession testSession = inProgressTestSessions.get(sessionKey); if (testSession == null) { throw new IllegalStateException("Could not find session span for key: " + sessionKey); } @@ -61,7 +63,7 @@ private DDTestSessionImpl getTestSession(T sessionKey) { @Override public void onTestSessionFinish(final T sessionKey) { - DDTestSessionImpl testSession = inProgressTestSessions.remove(sessionKey); + DDBuildSystemSession testSession = inProgressTestSessions.remove(sessionKey); testSession.end(null); } @@ -69,13 +71,13 @@ public void onTestSessionFinish(final T sessionKey) { public ModuleInfo onTestModuleStart( final T sessionKey, final String moduleName, - String startCommand, - Map additionalTags) { + Collection outputClassesDirs, + @Nullable Map additionalTags) { - DDTestSessionImpl testSession = inProgressTestSessions.get(sessionKey); - DDTestModuleImpl testModule = testSession.testModuleStart(moduleName, null); + DDBuildSystemSession testSession = inProgressTestSessions.get(sessionKey); + DDBuildSystemModule testModule = + testSession.testModuleStart(moduleName, null, outputClassesDirs); testModule.setTag(Tags.TEST_STATUS, CIConstants.TEST_PASS); - testModule.setTag(Tags.TEST_COMMAND, startCommand); if (additionalTags != null) { for (Map.Entry e : additionalTags.entrySet()) { @@ -94,21 +96,21 @@ public ModuleInfo onTestModuleStart( @Override public void onTestModuleSkip(final T sessionKey, final String moduleName, final String reason) { - DDTestModuleImpl testModule = getTestModule(sessionKey, moduleName); + DDBuildSystemModule testModule = getTestModule(sessionKey, moduleName); testModule.setSkipReason(reason); } @Override public void onTestModuleFail( final T 
sessionKey, final String moduleName, final Throwable throwable) { - DDTestModuleImpl testModule = getTestModule(sessionKey, moduleName); + DDBuildSystemModule testModule = getTestModule(sessionKey, moduleName); testModule.setErrorInfo(throwable); } - private DDTestModuleImpl getTestModule(final T sessionKey, final String moduleName) { + private DDBuildSystemModule getTestModule(final T sessionKey, final String moduleName) { TestModuleDescriptor testModuleDescriptor = new TestModuleDescriptor<>(sessionKey, moduleName); - DDTestModuleImpl testModule = inProgressTestModules.get(testModuleDescriptor); + DDBuildSystemModule testModule = inProgressTestModules.get(testModuleDescriptor); if (testModule == null) { throw new IllegalStateException( "Could not find module for session key " + sessionKey + " and module name " + moduleName); @@ -120,7 +122,7 @@ private DDTestModuleImpl getTestModule(final T sessionKey, final String moduleNa public void onTestModuleFinish(T sessionKey, String moduleName) { TestModuleDescriptor testModuleDescriptor = new TestModuleDescriptor<>(sessionKey, moduleName); - DDTestModuleImpl testModule = inProgressTestModules.remove(testModuleDescriptor); + DDBuildSystemModule testModule = inProgressTestModules.remove(testModuleDescriptor); if (testModule == null) { throw new IllegalStateException( "Could not find module span for session key " @@ -133,7 +135,7 @@ public void onTestModuleFinish(T sessionKey, String moduleName) { @Override public ModuleExecutionSettings getModuleExecutionSettings(T sessionKey, Path jvmExecutablePath) { - DDTestSessionImpl testSession = getTestSession(sessionKey); + DDBuildSystemSession testSession = getTestSession(sessionKey); JvmInfo jvmInfo = jvmInfoFactory.getJvmInfo(jvmExecutablePath); return testSession.getModuleExecutionSettings(jvmInfo); } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/TestEventsHandlerImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/TestEventsHandlerImpl.java index aea15f15377..022f7d0882e 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/TestEventsHandlerImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/events/TestEventsHandlerImpl.java @@ -3,20 +3,21 @@ import static datadog.trace.util.Strings.toJson; import datadog.trace.api.DisableTestTrace; +import datadog.trace.api.civisibility.InstrumentationBridge; import datadog.trace.api.civisibility.config.SkippableTest; import datadog.trace.api.civisibility.events.TestEventsHandler; import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.civisibility.DDTestFrameworkModule; +import datadog.trace.civisibility.DDTestFrameworkSession; import datadog.trace.civisibility.DDTestImpl; -import datadog.trace.civisibility.DDTestModuleImpl; -import datadog.trace.civisibility.DDTestSessionImpl; import datadog.trace.civisibility.DDTestSuiteImpl; import java.lang.reflect.Method; import java.util.Collection; import java.util.Collections; -import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; +import org.objectweb.asm.Type; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -24,8 +25,8 @@ public class TestEventsHandlerImpl implements TestEventsHandler { private static final Logger log = LoggerFactory.getLogger(TestEventsHandlerImpl.class); - private final DDTestSessionImpl testSession; - private 
final DDTestModuleImpl testModule; + private final DDTestFrameworkSession testSession; + private final DDTestFrameworkModule testModule; private final ConcurrentMap testSuiteNestedCallCounters = new ConcurrentHashMap<>(); @@ -36,7 +37,8 @@ public class TestEventsHandlerImpl implements TestEventsHandler { private final ConcurrentMap inProgressTests = new ConcurrentHashMap<>(); - public TestEventsHandlerImpl(DDTestSessionImpl testSession, DDTestModuleImpl testModule) { + public TestEventsHandlerImpl( + DDTestFrameworkSession testSession, DDTestFrameworkModule testModule) { this.testSession = testSession; this.testModule = testModule; } @@ -146,7 +148,7 @@ public void onTestStart( TestSuiteDescriptor suiteDescriptor = new TestSuiteDescriptor(testSuiteName, testClass); DDTestSuiteImpl testSuite = inProgressTestSuites.get(suiteDescriptor); - DDTestImpl test = testSuite.testStart(testName, testMethodName, testMethod, null); + DDTestImpl test = testSuite.testStart(testName, testMethod, null); if (testFramework != null) { test.setTag(Tags.TEST_FRAMEWORK, testFramework); @@ -157,9 +159,24 @@ public void onTestStart( if (testParameters != null) { test.setTag(Tags.TEST_PARAMETERS, testParameters); } + if (testMethodName != null && testMethod != null) { + test.setTag(Tags.TEST_SOURCE_METHOD, testMethodName + Type.getMethodDescriptor(testMethod)); + } if (categories != null && !categories.isEmpty()) { String json = toJson(Collections.singletonMap("category", toJson(categories)), true); test.setTag(Tags.TEST_TRAITS, json); + + for (String category : categories) { + if (category.endsWith(InstrumentationBridge.ITR_UNSKIPPABLE_TAG)) { + test.setTag(Tags.TEST_ITR_UNSKIPPABLE, true); + + SkippableTest thisTest = new SkippableTest(testSuiteName, testName, testParameters, null); + if (testModule.isSkippable(thisTest)) { + test.setTag(Tags.TEST_ITR_FORCED_RUN, true); + } + break; + } + } } TestDescriptor descriptor = @@ -240,7 +257,7 @@ public void onTestIgnore( final @Nullable String testFramework, final @Nullable String testFrameworkVersion, final @Nullable String testParameters, - final @Nullable List categories, + final @Nullable Collection categories, final @Nullable Class testClass, final @Nullable String testMethodName, final @Nullable Method testMethod, diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/CILocalGitInfoBuilder.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/CILocalGitInfoBuilder.java index 36e748cc302..3b5bb103e2d 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/CILocalGitInfoBuilder.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/CILocalGitInfoBuilder.java @@ -2,14 +2,24 @@ import datadog.trace.api.git.GitInfo; import datadog.trace.api.git.GitInfoBuilder; +import datadog.trace.civisibility.git.tree.GitClient; +import datadog.trace.util.Strings; +import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CILocalGitInfoBuilder implements GitInfoBuilder { + private static final Logger LOGGER = LoggerFactory.getLogger(CILocalGitInfoBuilder.class); + + private final GitClient.Factory gitClientFactory; private final String gitFolderName; - public CILocalGitInfoBuilder(String gitFolderName) { + public CILocalGitInfoBuilder(GitClient.Factory gitClientFactory, String gitFolderName) { + this.gitClientFactory 
= gitClientFactory; this.gitFolderName = gitFolderName; } @@ -18,8 +28,25 @@ public GitInfo build(@Nullable String repositoryPath) { if (repositoryPath == null) { return GitInfo.NOOP; } - return new LocalFSGitInfoExtractor() - .headCommit(Paths.get(repositoryPath, gitFolderName).toFile().getAbsolutePath()); + + Path gitPath = getGitPath(repositoryPath); + return new LocalFSGitInfoExtractor().headCommit(gitPath.toFile().getAbsolutePath()); + } + + private Path getGitPath(String repositoryPath) { + try { + GitClient gitClient = gitClientFactory.create(repositoryPath); + String gitFolder = gitClient.getGitFolder(); + if (Strings.isNotBlank(gitFolder)) { + Path gitFolderPath = Paths.get(gitFolder); + if (Files.exists(gitFolderPath)) { + return gitFolderPath; + } + } + } catch (Exception e) { + LOGGER.debug("Error while getting Git folder in " + repositoryPath, e); + } + return Paths.get(repositoryPath, gitFolderName); } @Override diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/GitClientGitInfoBuilder.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/GitClientGitInfoBuilder.java new file mode 100644 index 00000000000..fa5d2eac2fe --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/GitClientGitInfoBuilder.java @@ -0,0 +1,66 @@ +package datadog.trace.civisibility.git; + +import datadog.trace.api.Config; +import datadog.trace.api.git.CommitInfo; +import datadog.trace.api.git.GitInfo; +import datadog.trace.api.git.GitInfoBuilder; +import datadog.trace.api.git.PersonInfo; +import datadog.trace.civisibility.git.tree.GitClient; +import java.util.List; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class GitClientGitInfoBuilder implements GitInfoBuilder { + + private static final Logger LOGGER = LoggerFactory.getLogger(GitClientGitInfoBuilder.class); + + private final Config config; + private final GitClient.Factory gitClientFactory; + + public GitClientGitInfoBuilder(Config config, GitClient.Factory gitClientFactory) { + this.config = config; + this.gitClientFactory = gitClientFactory; + } + + @Override + public GitInfo build(@Nullable String repositoryPath) { + if (repositoryPath == null) { + return GitInfo.NOOP; + } + + GitClient gitClient = gitClientFactory.create(repositoryPath); + try { + String remoteName = config.getCiVisibilityGitRemoteName(); + String remoteUrl = gitClient.getRemoteUrl(remoteName); + String branch = gitClient.getCurrentBranch(); + List tags = gitClient.getTags(GitClient.HEAD); + String tag = !tags.isEmpty() ? 
tags.iterator().next() : null; + + String currentCommitSha = gitClient.getSha(GitClient.HEAD); + String fullMessage = gitClient.getFullMessage(GitClient.HEAD); + + String authorName = gitClient.getAuthorName(GitClient.HEAD); + String authorEmail = gitClient.getAuthorEmail(GitClient.HEAD); + String authorDate = gitClient.getAuthorDate(GitClient.HEAD); + PersonInfo author = new PersonInfo(authorName, authorEmail, authorDate); + + String committerName = gitClient.getCommitterName(GitClient.HEAD); + String committerEmail = gitClient.getCommitterEmail(GitClient.HEAD); + String committerDate = gitClient.getCommitterDate(GitClient.HEAD); + PersonInfo committer = new PersonInfo(committerName, committerEmail, committerDate); + + CommitInfo commitInfo = new CommitInfo(currentCommitSha, author, committer, fullMessage); + return new GitInfo(remoteUrl, branch, tag, commitInfo); + + } catch (Exception e) { + LOGGER.debug("Error while getting Git data from " + repositoryPath, e); + return GitInfo.NOOP; + } + } + + @Override + public int order() { + return 3; + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitClient.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitClient.java index 2d33add5525..c42ee432e89 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitClient.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitClient.java @@ -1,8 +1,10 @@ package datadog.trace.civisibility.git.tree; +import datadog.trace.api.Config; import datadog.trace.civisibility.utils.IOUtils; import datadog.trace.civisibility.utils.ShellCommandExecutor; import datadog.trace.util.Strings; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; @@ -11,13 +13,17 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.concurrent.TimeoutException; +import javax.annotation.Nullable; /** Client for fetching data and performing operations on a local Git repository. */ public class GitClient { + public static final String HEAD = "HEAD"; + private static final String DD_TEMP_DIRECTORY_PREFIX = "dd-ci-vis-"; private final String repoRoot; @@ -60,18 +66,38 @@ public boolean isShallow() throws IOException, TimeoutException, InterruptedExce return Boolean.parseBoolean(output); } + /** + * Returns the SHA of the head commit of the upstream (remote tracking) branch for the currently + * checked-out local branch. If the local branch is not tracking any remote branches, a {@link + * datadog.trace.civisibility.utils.ShellCommandExecutor.ShellCommandFailedException} exception + * will be thrown. + * + * @return The name of the upstream branch if the current local branch is tracking any. 
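The value produced here is the output of git rev-parse @{upstream}, i.e. the commit SHA of the upstream head rather than a branch name. A minimal usage sketch (the path is illustrative, Config.get() is assumed to be the usual global config accessor, and checked exceptions are omitted):

GitClient client = new GitClient.Factory(Config.get()).create("/path/to/repo");
String upstreamHeadSha = client.getUpstreamBranchSha(); // runs: git rev-parse @{upstream}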
+ * @throws ShellCommandExecutor.ShellCommandFailedException If the Git command fails with an error + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public String getUpstreamBranchSha() throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "rev-parse", "@{upstream}") + .trim(); + } + /** * "Unshallows" the repo that the client is associated with by fetching missing commit data from * the server. * + * @param remoteCommitReference The commit to fetch from the remote repository, so local repo will + * be updated with this commit and its ancestors. If {@code null}, everything will be fetched. * @throws IOException If an error was encountered while writing command input or reading output * @throws TimeoutException If timeout was reached while waiting for Git command to finish * @throws InterruptedException If current thread was interrupted while waiting for Git command to * finish */ - public void unshallow() throws IOException, TimeoutException, InterruptedException { - String headSha = - commandExecutor.executeCommand(IOUtils::readFully, "git", "rev-parse", "HEAD").trim(); + public void unshallow(@Nullable String remoteCommitReference) + throws IOException, TimeoutException, InterruptedException { String remote = commandExecutor .executeCommand( @@ -85,16 +111,44 @@ public void unshallow() throws IOException, TimeoutException, InterruptedExcepti .trim(); // refetch data from the server for the given period of time - commandExecutor.executeCommand( - ShellCommandExecutor.OutputParser.IGNORE, - "git", - "fetch", - String.format("--shallow-since=='%s'", latestCommitsSince), - "--update-shallow", - "--filter=blob:none", - "--recurse-submodules=no", - remote, - headSha); + if (remoteCommitReference != null) { + String headSha = getSha(remoteCommitReference); + commandExecutor.executeCommand( + ShellCommandExecutor.OutputParser.IGNORE, + "git", + "fetch", + String.format("--shallow-since=='%s'", latestCommitsSince), + "--update-shallow", + "--filter=blob:none", + "--recurse-submodules=no", + remote, + headSha); + } else { + commandExecutor.executeCommand( + ShellCommandExecutor.OutputParser.IGNORE, + "git", + "fetch", + String.format("--shallow-since=='%s'", latestCommitsSince), + "--update-shallow", + "--filter=blob:none", + "--recurse-submodules=no", + remote); + } + } + + /** + * Returns the absolute path of the .git directory. 
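The lookup below shells out to git rev-parse --absolute-git-dir, which for a plain clone resolves to the .git directory directly under the repository root, but for linked worktrees or submodules can point elsewhere; that is presumably why CILocalGitInfoBuilder asks the Git client first and only falls back to appending the configured folder name. A rough sketch (paths are illustrative, Config.get() is assumed to be the usual global config accessor, checked exceptions omitted):

GitClient client = new GitClient.Factory(Config.get()).create("/repo");
String gitDir = client.getGitFolder();
// plain clone:     /repo/.git
// linked worktree: /main-repo/.git/worktrees/feature-x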
+ * + * @return absolute path + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getGitFolder() throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "rev-parse", "--absolute-git-dir") + .trim(); } /** @@ -115,6 +169,178 @@ public String getRemoteUrl(String remoteName) .trim(); } + /** + * Returns current branch, or an empty string if HEAD is not pointing to a branch + * + * @return current branch, or an empty string if HEAD is not pointing to a branch + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getCurrentBranch() + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "branch", "--show-current") + .trim(); + } + + /** + * Returns list of tags that provided commit points to + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return list of tags that the commit is pointing to, or empty list if there are no such tags + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull List getTags(String commit) + throws IOException, TimeoutException, InterruptedException { + try { + return commandExecutor.executeCommand( + IOUtils::readLines, "git", "describe", "--tags", "--exact-match", commit); + } catch (ShellCommandExecutor.ShellCommandFailedException e) { + // if provided commit is not tagged, + // command will fail because "--exact-match" is specified + return Collections.emptyList(); + } + } + + /** + * Returns SHA of the provided reference + * + * @param reference Reference (HEAD, branch name, etc) to check + * @return full SHA of the provided reference + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getSha(String reference) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor.executeCommand(IOUtils::readFully, "git", "rev-parse", reference).trim(); + } + + /** + * Returns full message of the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return full message of the provided commit + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getFullMessage(String 
commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%B", commit) + .trim(); + } + + /** + * Returns author name for the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return author name for the provided commit + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getAuthorName(String commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%an", commit) + .trim(); + } + + /** + * Returns author email for the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return author email for the provided commit + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getAuthorEmail(String commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%ae", commit) + .trim(); + } + + /** + * Returns author date in strict ISO 8601 format for the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return author date in strict ISO 8601 format + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getAuthorDate(String commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%aI", commit) + .trim(); + } + + /** + * Returns committer name for the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return committer name for the provided commit + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getCommitterName(String commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%cn", commit) + .trim(); + } + + /** + * Returns committer email for the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return committer email for the provided commit + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish 
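For quick reference, the message, author and committer accessors above and below all run git log -n 1 on the given commit, differing only in the format specifier:

// getFullMessage    -> --format=%B
// getAuthorName     -> --format=%an
// getAuthorEmail    -> --format=%ae
// getAuthorDate     -> --format=%aI  (strict ISO 8601)
// getCommitterName  -> --format=%cn
// getCommitterEmail -> --format=%ce
// getCommitterDate  -> --format=%cI  (strict ISO 8601)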
+ * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getCommitterEmail(String commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%ce", commit) + .trim(); + } + + /** + * Returns committer date in strict ISO 8601 format for the provided commit + * + * @param commit Commit SHA or reference (HEAD, branch name, etc) to check + * @return committer date in strict ISO 8601 format + * @throws IOException If an error was encountered while writing command input or reading output + * @throws TimeoutException If timeout was reached while waiting for Git command to finish + * @throws InterruptedException If current thread was interrupted while waiting for Git command to + * finish + */ + public @NonNull String getCommitterDate(String commit) + throws IOException, TimeoutException, InterruptedException { + return commandExecutor + .executeCommand(IOUtils::readFully, "git", "log", "-n", "1", "--format=%cI", commit) + .trim(); + } + /** * Returns SHAs of the latest commits in the current branch. Maximum number of commits and how far * into the past to look are configured when the client is created. @@ -223,4 +449,17 @@ private Path createTempDirectory() throws IOException { public String toString() { return "GitClient{" + repoRoot + "}"; } + + public static class Factory { + private final Config config; + + public Factory(Config config) { + this.config = config; + } + + public GitClient create(String repoRoot) { + long commandTimeoutMillis = config.getCiVisibilityGitCommandTimeoutMillis(); + return new GitClient(repoRoot, "1 month ago", 1000, commandTimeoutMillis); + } + } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitDataUploaderImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitDataUploaderImpl.java index 88281cb722a..ca142328102 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitDataUploaderImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/git/tree/GitDataUploaderImpl.java @@ -1,8 +1,12 @@ package datadog.trace.civisibility.git.tree; import datadog.trace.api.Config; +import datadog.trace.api.git.GitInfo; +import datadog.trace.api.git.GitInfoProvider; import datadog.trace.civisibility.utils.FileUtils; +import datadog.trace.civisibility.utils.ShellCommandExecutor; import datadog.trace.util.AgentThreadFactory; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -23,15 +27,24 @@ public class GitDataUploaderImpl implements GitDataUploader { private final Config config; private final GitDataApi gitDataApi; private final GitClient gitClient; + private final GitInfoProvider gitInfoProvider; + private final String repoRoot; private final String remoteName; private final Thread uploadFinishedShutdownHook; private volatile CompletableFuture callback; public GitDataUploaderImpl( - Config config, GitDataApi gitDataApi, GitClient gitClient, String remoteName) { + Config config, + GitDataApi gitDataApi, + GitClient gitClient, + GitInfoProvider gitInfoProvider, + String repoRoot, + String remoteName) { this.config = config; this.gitDataApi = gitDataApi; this.gitClient = gitClient; + this.gitInfoProvider = gitInfoProvider; + this.repoRoot = repoRoot; 
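For orientation, a rough wiring sketch showing how the factory and the uploader fit together. The GitDataApi instance and repository root are stand-ins and GitInfoProvider.INSTANCE is an assumption; this is a sketch, not the tracer's actual bootstrap code:

static GitDataUploader buildUploader(GitDataApi gitDataApi, String repoRoot) {
  Config config = Config.get();
  GitClient.Factory gitClientFactory = new GitClient.Factory(config);
  return new GitDataUploaderImpl(
      config,
      gitDataApi,
      gitClientFactory.create(repoRoot),
      GitInfoProvider.INSTANCE, // assumption: the shared provider instance
      repoRoot,
      config.getCiVisibilityGitRemoteName());
}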
this.remoteName = remoteName; // maven has a way of calling System.exit() when the build is done. @@ -69,10 +82,11 @@ public Future startOrObserveGitDataUpload() { private void uploadGitData() { try { if (config.isCiVisibilityGitUnshallowEnabled() && gitClient.isShallow()) { - gitClient.unshallow(); + unshallowRepository(); } - String remoteUrl = gitClient.getRemoteUrl(remoteName); + GitInfo gitInfo = gitInfoProvider.getGitInfo(repoRoot); + String remoteUrl = gitInfo.getRepositoryURL(); List latestCommits = gitClient.getLatestCommits(); if (latestCommits.isEmpty()) { LOGGER.debug("No commits in the last month"); @@ -125,6 +139,27 @@ private void uploadGitData() { } } + private void unshallowRepository() throws IOException, TimeoutException, InterruptedException { + try { + gitClient.unshallow(GitClient.HEAD); + return; + } catch (ShellCommandExecutor.ShellCommandFailedException e) { + LOGGER.debug( + "Could not unshallow using HEAD - assuming HEAD points to a local commit that does not exist in the remote repo", + e); + } + + try { + String upstreamBranch = gitClient.getUpstreamBranchSha(); + gitClient.unshallow(upstreamBranch); + } catch (ShellCommandExecutor.ShellCommandFailedException e) { + LOGGER.debug( + "Could not unshallow using upstream branch - assuming currently checked out local branch does not track any remote branch", + e); + gitClient.unshallow(null); + } + } + private void waitForUploadToFinish() { try { long uploadTimeoutMillis = config.getCiVisibilityGitUploadTimeoutMillis(); diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/ModuleExecutionResult.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/ModuleExecutionResult.java index eb9b9ece616..be122f0e763 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/ModuleExecutionResult.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/ModuleExecutionResult.java @@ -1,6 +1,11 @@ package datadog.trace.civisibility.ipc; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; import java.util.Objects; import javax.annotation.Nullable; @@ -15,8 +20,8 @@ public class ModuleExecutionResult implements Signal { private final boolean coverageEnabled; private final boolean itrEnabled; private final long testsSkippedTotal; - @Nullable private final String testFramework; - @Nullable private final String testFrameworkVersion; + private final Collection testFrameworks; + @Nullable private final byte[] coverageData; public ModuleExecutionResult( long sessionId, @@ -24,15 +29,15 @@ public ModuleExecutionResult( boolean coverageEnabled, boolean itrEnabled, long testsSkippedTotal, - @Nullable String testFramework, - @Nullable String testFrameworkVersion) { + Collection testFrameworks, + @Nullable byte[] coverageData) { this.sessionId = sessionId; this.moduleId = moduleId; this.coverageEnabled = coverageEnabled; this.itrEnabled = itrEnabled; this.testsSkippedTotal = testsSkippedTotal; - this.testFramework = testFramework; - this.testFrameworkVersion = testFrameworkVersion; + this.testFrameworks = testFrameworks; + this.coverageData = coverageData; } public long getSessionId() { @@ -55,14 +60,13 @@ public long getTestsSkippedTotal() { return testsSkippedTotal; } - @Nullable - public String getTestFramework() { - return testFramework; + public Collection getTestFrameworks() { + 
return testFrameworks; } @Nullable - public String getTestFrameworkVersion() { - return testFrameworkVersion; + public byte[] getCoverageData() { + return coverageData; } @Override @@ -79,8 +83,8 @@ public boolean equals(Object o) { && coverageEnabled == that.coverageEnabled && itrEnabled == that.itrEnabled && testsSkippedTotal == that.testsSkippedTotal - && Objects.equals(testFramework, that.testFramework) - && Objects.equals(testFrameworkVersion, that.testFrameworkVersion); + && Objects.equals(testFrameworks, that.testFrameworks) + && Arrays.equals(coverageData, that.coverageData); } @Override @@ -91,8 +95,8 @@ public int hashCode() { coverageEnabled, itrEnabled, testsSkippedTotal, - testFramework, - testFrameworkVersion); + testFrameworks, + Arrays.hashCode(coverageData)); } @Override @@ -118,14 +122,20 @@ public SignalType getType() { @Override public ByteBuffer serialize() { - byte[] testFrameworkBytes = testFramework != null ? testFramework.getBytes() : null; - byte[] testFrameworkVersionBytes = - testFrameworkVersion != null ? testFrameworkVersion.getBytes() : null; - - int testFrameworkLength = testFrameworkBytes != null ? testFrameworkBytes.length : 0; - int testFrameworkVersionLength = - testFrameworkVersionBytes != null ? testFrameworkVersionBytes.length : 0; - int variableLength = Integer.BYTES * 2 + testFrameworkLength + testFrameworkVersionLength; + int coverageDataLength = coverageData != null ? coverageData.length : 0; + int variableLength = Integer.BYTES * 2 + coverageDataLength; + + for (TestFramework testFramework : testFrameworks) { + String testFrameworkName = testFramework.getName(); + String testFrameworkVersion = testFramework.getVersion(); + int testFrameworkNameBytes = + testFrameworkName != null ? testFrameworkName.getBytes(StandardCharsets.UTF_8).length : 0; + int testFrameworkVersionBytes = + testFrameworkVersion != null + ? 
testFrameworkVersion.getBytes(StandardCharsets.UTF_8).length + : 0; + variableLength += Integer.BYTES * 2 + testFrameworkNameBytes + testFrameworkVersionBytes; + } ByteBuffer buffer = ByteBuffer.allocate(FIXED_LENGTH + variableLength); buffer.putLong(sessionId); @@ -141,14 +151,30 @@ public ByteBuffer serialize() { } buffer.put(flags); - buffer.putInt(testFrameworkLength); - if (testFrameworkLength != 0) { - buffer.put(testFrameworkBytes); + buffer.putInt(testFrameworks.size()); + for (TestFramework testFramework : testFrameworks) { + String testFrameworkName = testFramework.getName(); + if (testFrameworkName != null) { + byte[] testFrameworkNameBytes = testFrameworkName.getBytes(StandardCharsets.UTF_8); + buffer.putInt(testFrameworkNameBytes.length); + buffer.put(testFrameworkNameBytes); + } else { + buffer.putInt(0); + } + + String testFrameworkVersion = testFramework.getVersion(); + if (testFrameworkVersion != null) { + byte[] testFrameworkVersionBytes = testFrameworkVersion.getBytes(StandardCharsets.UTF_8); + buffer.putInt(testFrameworkVersionBytes.length); + buffer.put(testFrameworkVersionBytes); + } else { + buffer.putInt(0); + } } - buffer.putInt(testFrameworkVersionLength); - if (testFrameworkVersionLength != 0) { - buffer.put(testFrameworkVersionBytes); + buffer.putInt(coverageDataLength); + if (coverageDataLength != 0) { + buffer.put(coverageData); } buffer.flip(); @@ -172,24 +198,39 @@ public static ModuleExecutionResult deserialize(ByteBuffer buffer) { boolean coverageEnabled = (flags & COVERAGE_ENABLED_FLAG) != 0; boolean itrEnabled = (flags & ITR_ENABLED_FLAG) != 0; - String testFramework; - int testFrameworkLength = buffer.getInt(); - if (testFrameworkLength != 0) { - byte[] testFrameworkBytes = new byte[testFrameworkLength]; - buffer.get(testFrameworkBytes); - testFramework = new String(testFrameworkBytes); - } else { - testFramework = null; + int testFrameworksSize = buffer.getInt(); + List testFrameworks = new ArrayList<>(testFrameworksSize); + for (int i = 0; i < testFrameworksSize; i++) { + int testFrameworkNameLength = buffer.getInt(); + String testFrameworkName; + if (testFrameworkNameLength != 0) { + byte[] testFrameworkNameBytes = new byte[testFrameworkNameLength]; + buffer.get(testFrameworkNameBytes); + testFrameworkName = new String(testFrameworkNameBytes, StandardCharsets.UTF_8); + } else { + testFrameworkName = null; + } + + int testFrameworkVersionLength = buffer.getInt(); + String testFrameworkVersion; + if (testFrameworkVersionLength != 0) { + byte[] testFrameworkVersionBytes = new byte[testFrameworkVersionLength]; + buffer.get(testFrameworkVersionBytes); + testFrameworkVersion = new String(testFrameworkVersionBytes, StandardCharsets.UTF_8); + } else { + testFrameworkVersion = null; + } + + testFrameworks.add(new TestFramework(testFrameworkName, testFrameworkVersion)); } - String testFrameworkVersion; - int testFrameworkVersionLength = buffer.getInt(); - if (testFrameworkVersionLength != 0) { - byte[] testFrameworkVersionBytes = new byte[testFrameworkVersionLength]; - buffer.get(testFrameworkVersionBytes); - testFrameworkVersion = new String(testFrameworkVersionBytes); + byte[] coverageData; + int coverageDataLength = buffer.getInt(); + if (coverageDataLength != 0) { + coverageData = new byte[coverageDataLength]; + buffer.get(coverageData); } else { - testFrameworkVersion = null; + coverageData = null; } return new ModuleExecutionResult( @@ -198,7 +239,7 @@ public static ModuleExecutionResult deserialize(ByteBuffer buffer) { coverageEnabled, itrEnabled, 
testsSkippedTotal, - testFramework, - testFrameworkVersion); + testFrameworks, + coverageData); } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalClient.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalClient.java index aae3926be88..013dbbb9bc4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalClient.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalClient.java @@ -112,4 +112,21 @@ public SignalResponse send(Signal signal) throws IOException { static String getErrorMessage(ErrorResponse response) { return "Server returned an error: " + response.getMessage(); } + + public static final class Factory { + private final InetSocketAddress signalServerAddress; + + public Factory(InetSocketAddress signalServerAddress) { + this.signalServerAddress = signalServerAddress; + } + + public SignalClient create() { + try { + return new SignalClient(signalServerAddress); + } catch (IOException e) { + throw new RuntimeException( + "Could not instantiate signal client. Address: " + signalServerAddress, e); + } + } + } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalServerRunnable.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalServerRunnable.java index 9d8752beb45..6168cecf019 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalServerRunnable.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SignalServerRunnable.java @@ -9,6 +9,7 @@ import java.util.EnumMap; import java.util.Iterator; import java.util.Map; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,7 +17,7 @@ class SignalServerRunnable implements Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(SignalServerRunnable.class); - + private static final long SELECT_TIMEOUT = TimeUnit.SECONDS.toMillis(5); private static final Map> DESERIALIZERS = new EnumMap<>(SignalType.class); @@ -51,7 +52,7 @@ public void run() { } private void processSelectableKeys() throws IOException { - selector.select(); + selector.select(SELECT_TIMEOUT); Iterator keyIterator = selector.selectedKeys().iterator(); while (keyIterator.hasNext()) { diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SkippableTestsRequest.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SkippableTestsRequest.java index 737d634f474..8f4f3f9c56e 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SkippableTestsRequest.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/SkippableTestsRequest.java @@ -1,6 +1,6 @@ package datadog.trace.civisibility.ipc; -import datadog.trace.api.civisibility.config.JvmInfo; +import datadog.trace.civisibility.config.JvmInfo; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Objects; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/TestFramework.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/TestFramework.java new file mode 100644 index 00000000000..7130c3cdac5 --- /dev/null +++ 
b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/ipc/TestFramework.java @@ -0,0 +1,38 @@ +package datadog.trace.civisibility.ipc; + +import java.util.Objects; + +public final class TestFramework { + private final String name; + private final String version; + + public TestFramework(String name, String version) { + this.name = name; + this.version = version; + } + + public String getName() { + return name; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TestFramework that = (TestFramework) o; + return Objects.equals(name, that.name) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEffortMethodLinesResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEffortMethodLinesResolver.java new file mode 100644 index 00000000000..7a7f3b7be71 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEffortMethodLinesResolver.java @@ -0,0 +1,25 @@ +package datadog.trace.civisibility.source; + +import java.lang.reflect.Method; +import javax.annotation.Nonnull; + +public class BestEffortMethodLinesResolver implements MethodLinesResolver { + + private final MethodLinesResolver[] delegates; + + public BestEffortMethodLinesResolver(MethodLinesResolver... delegates) { + this.delegates = delegates; + } + + @Nonnull + @Override + public MethodLines getLines(@Nonnull Method method) { + for (MethodLinesResolver delegate : delegates) { + MethodLines lines = delegate.getLines(method); + if (lines.isValid()) { + return lines; + } + } + return MethodLines.EMPTY; + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEfforSourcePathResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEffortSourcePathResolver.java similarity index 69% rename from dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEfforSourcePathResolver.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEffortSourcePathResolver.java index bb1b73192d2..9d83ddc2cbf 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEfforSourcePathResolver.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/BestEffortSourcePathResolver.java @@ -1,14 +1,13 @@ package datadog.trace.civisibility.source; -import datadog.trace.api.civisibility.source.SourcePathResolver; import javax.annotation.Nonnull; import javax.annotation.Nullable; -public class BestEfforSourcePathResolver implements SourcePathResolver { +public class BestEffortSourcePathResolver implements SourcePathResolver { private final SourcePathResolver[] delegates; - public BestEfforSourcePathResolver(SourcePathResolver... delegates) { + public BestEffortSourcePathResolver(SourcePathResolver... 
delegates) { this.delegates = delegates; } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolverImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/ByteCodeMethodLinesResolver.java similarity index 95% rename from dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolverImpl.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/ByteCodeMethodLinesResolver.java index 944a28cb765..8d5d3d26411 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolverImpl.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/ByteCodeMethodLinesResolver.java @@ -16,9 +16,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class MethodLinesResolverImpl implements MethodLinesResolver { +public class ByteCodeMethodLinesResolver implements MethodLinesResolver { - private static final Logger log = LoggerFactory.getLogger(MethodLinesResolverImpl.class); + private static final Logger log = LoggerFactory.getLogger(ByteCodeMethodLinesResolver.class); private final DDCache, ClassMethodLines> methodLinesCache = DDCaches.newFixedSizeIdentityCache(16); diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedMethodLinesResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedMethodLinesResolver.java new file mode 100644 index 00000000000..15bdaae5b01 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedMethodLinesResolver.java @@ -0,0 +1,21 @@ +package datadog.trace.civisibility.source; + +import datadog.compiler.utils.CompilerUtils; +import java.lang.reflect.Method; +import javax.annotation.Nonnull; + +public class CompilerAidedMethodLinesResolver implements MethodLinesResolver { + @Nonnull + @Override + public MethodLines getLines(@Nonnull Method method) { + int startLine = CompilerUtils.getStartLine(method); + if (startLine <= 0) { + return MethodLines.EMPTY; + } + int endLine = CompilerUtils.getEndLine(method); + if (endLine <= 0) { + return MethodLines.EMPTY; + } + return new MethodLines(startLine, endLine); + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedSourcePathResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedSourcePathResolver.java index 18d77fe96a2..c8000a10ce8 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedSourcePathResolver.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/CompilerAidedSourcePathResolver.java @@ -1,7 +1,6 @@ package datadog.trace.civisibility.source; import datadog.compiler.utils.CompilerUtils; -import datadog.trace.api.civisibility.source.SourcePathResolver; import java.io.File; import javax.annotation.Nonnull; import javax.annotation.Nullable; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolver.java index 99cd72c5b2f..0c046927f10 100644 --- 
a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolver.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/MethodLinesResolver.java @@ -1,6 +1,7 @@ package datadog.trace.civisibility.source; import java.lang.reflect.Method; +import java.util.Objects; import javax.annotation.Nonnull; public interface MethodLinesResolver { @@ -30,5 +31,22 @@ public int getFinishLineNumber() { public boolean isValid() { return startLineNumber <= finishLineNumber; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MethodLines that = (MethodLines) o; + return startLineNumber == that.startLineNumber && finishLineNumber == that.finishLineNumber; + } + + @Override + public int hashCode() { + return Objects.hash(startLineNumber, finishLineNumber); + } } } diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/source/SourcePathResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/SourcePathResolver.java similarity index 88% rename from internal-api/src/main/java/datadog/trace/api/civisibility/source/SourcePathResolver.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/SourcePathResolver.java index 97adfbe7fc6..23c0f2682c8 100644 --- a/internal-api/src/main/java/datadog/trace/api/civisibility/source/SourcePathResolver.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/SourcePathResolver.java @@ -1,4 +1,4 @@ -package datadog.trace.api.civisibility.source; +package datadog.trace.civisibility.source; import javax.annotation.Nonnull; import javax.annotation.Nullable; diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceRootResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageResolver.java similarity index 52% rename from dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceRootResolver.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageResolver.java index 1a257b8fa5a..e409f2cba71 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceRootResolver.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageResolver.java @@ -3,6 +3,6 @@ import java.io.IOException; import java.nio.file.Path; -public interface SourceRootResolver { - Path getSourceRoot(Path sourceFile) throws IOException; +public interface PackageResolver { + Path getPackage(Path sourceFile) throws IOException; } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceRootResolverImpl.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageResolverImpl.java similarity index 67% rename from dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceRootResolverImpl.java rename to dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageResolverImpl.java index 315729a0ffc..6aedf77370a 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceRootResolverImpl.java +++ 
b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageResolverImpl.java @@ -8,34 +8,28 @@ import java.nio.file.InvalidPathException; import java.nio.file.Path; -class SourceRootResolverImpl implements SourceRootResolver { +class PackageResolverImpl implements PackageResolver { private static final String PACKAGE_KEYWORD = "package"; private final FileSystem fileSystem; - SourceRootResolverImpl(FileSystem fileSystem) { + PackageResolverImpl(FileSystem fileSystem) { this.fileSystem = fileSystem; } /** - * Given a path to a Java source file, returns its source root (i.e. path without the filename and - * package folders). - * - *
<p>
For example, a class foo.bar.MyClass located at - * /repo/src/foo/bar/MyClass.java will have the source root /repo/src + * Given a path to a source file, returns its package path. * *
<p>
The implementation of this method is rather naive: it does not actually parse the file, nor * does it build an AST. * - *
<p>
It simply looks for a line, that contains the package keyword, extracts the + *
<p>
It simply looks for a line, that contains the package keyword and extracts the * part that goes after it and until the nearest ; character, then verifies that the - * extracted part looks plausible by checking the actual file path (package path is the suffix - * that is stripped from the full path in order to get the source root). + * extracted part looks plausible by checking the actual file path. */ @Override - public Path getSourceRoot(Path sourceFile) throws IOException { + public Path getPackage(Path sourceFile) throws IOException { Path folder = sourceFile.getParent(); - try (BufferedReader br = Files.newBufferedReader(sourceFile)) { String line; while ((line = br.readLine()) != null) { @@ -53,7 +47,7 @@ public Path getSourceRoot(Path sourceFile) throws IOException { int packageNameEnd = line.indexOf(';', packageNameStart); if (packageNameEnd == -1) { - packageNameEnd = lineLength; // no ';' is possible if this is a groovy file + packageNameEnd = lineLength; // no ';' is possible if this is a Groovy file } String packageName = line.substring(packageNameStart, packageNameEnd); @@ -64,18 +58,13 @@ public Path getSourceRoot(Path sourceFile) throws IOException { continue; } - if (!folder.endsWith(packagePath)) { - continue; + if (folder.endsWith(packagePath)) { + return packagePath; } - - // remove package path suffix from folder path to get source root - return folder - .getRoot() - .resolve(folder.subpath(0, folder.getNameCount() - packagePath.getNameCount())); } } // apparently there is no package declaration - class is located in the default package - return folder; + return fileSystem.getPath(""); } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageTree.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageTree.java new file mode 100644 index 00000000000..d7a9f5d35db --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/PackageTree.java @@ -0,0 +1,148 @@ +package datadog.trace.civisibility.source.index; + +import java.nio.file.Path; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Deque; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * This class maintains the list of packages present in a repository. + * + *
<p>
If a package includes classes, all of its child packages are ignored (that is, if there are + * packages a/b, a/b/c, a/b/d, all of which contain classes, only a/b will be retained - a/b/c and + * a/b/d will be discarded as redundant). + * + *
<p>
The length of the list is limited by {@link #MAX_CHILDREN_LEAVES}. If there are more packages + * present than the limit, coarsening will happen (for instance, if there are packages a/b/c, a/b/d, + * b/c/d, b/c/e/f and the limit is 2, the packages will be coarsened and a/b, b/c will be retained + * as the result). + * + *
<p>
The intent is to be as specific as possible without exceeding the limit, so coarsening applies + * first to the "longest" package names (not in terms of the actual string length, but in terms of + * the number of segments separated by "."). If there are multiple packages with the same length, + * those that have more children will be coarsened first. + */ +public class PackageTree { + + private static final int MAX_CHILDREN_LEAVES = 25; + + private final Node root = new Node(null, ""); + + void add(Path packagePath) { + if (packagePath.toString().isEmpty()) { + return; + } + root.add(packagePath.iterator()); + } + + List asList() { + truncateIfNeeded(root); + + List childrenPackages = new ArrayList<>(MAX_CHILDREN_LEAVES); + for (Node child : root.children.values()) { + child.stringify(childrenPackages, ""); + } + return childrenPackages; + } + + private static void truncateIfNeeded(Node root) { + Deque> nodesByDepth = new ArrayDeque<>(); + + List current = Collections.singletonList(root); + while (!current.isEmpty()) { + List next = new ArrayList<>(); + for (Node treeNode : current) { + next.addAll(treeNode.children.values()); + } + nodesByDepth.push(current); + current = next; + } + + // start truncating with the deepest nodes + // (i.e. most specific packages names) + while (!nodesByDepth.isEmpty()) { + List nodes = nodesByDepth.pop(); + // sorting the nodes now as leafChildren counts might have changed + // if their children were truncated + nodes.sort(Comparator.comparingInt(node -> -node.leafChildren)); + + for (Node node : nodes) { + if (root.leafChildren <= MAX_CHILDREN_LEAVES) { + // stop as soon as we have truncated enough, even if it's mid-level + return; + } else { + node.truncate(); + } + } + } + } + + private static final class Node { + private final Node parent; + private final String name; + private Map children = new HashMap<>(); + private int leafChildren; + private boolean leaf; + + private Node(Node parent, String name) { + this.parent = parent; + this.name = name; + } + + private int add(Iterator iterator) { + if (leaf) { + return 0; + + } else if (!iterator.hasNext()) { + leaf = true; + if (leafChildren == 0) { + return ++leafChildren; + } else { + // what used to be a non-leaf is now a leaf, + // truncating children + int delta = 1 - leafChildren; + children = Collections.emptyMap(); + leafChildren = 1; + return delta; + } + + } else { + Path element = iterator.next(); + Node child = + children.computeIfAbsent(element.toString(), nodeName -> new Node(this, nodeName)); + int delta = child.add(iterator); + leafChildren += delta; + return delta; + } + } + + private void truncate() { + children = Collections.emptyMap(); + leaf = true; + + int delta = leafChildren - 1; + Node current = this; + while (current != null) { + current.leafChildren -= delta; + current = current.parent; + } + } + + private void stringify(List childrenPackages, String currentPath) { + currentPath += name + "."; + if (leaf) { + childrenPackages.add(currentPath + "*"); + } else { + for (Node child : children.values()) { + child.stringify(childrenPackages, currentPath); + } + } + } + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndex.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndex.java index 10a735029ec..d8010aeb072 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndex.java +++ 
b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndex.java @@ -27,7 +27,8 @@ public class RepoIndex { static final RepoIndex EMPTY = - new RepoIndex(ClassNameTrie.Builder.EMPTY_TRIE, Collections.emptyList()); + new RepoIndex( + ClassNameTrie.Builder.EMPTY_TRIE, Collections.emptyList(), Collections.emptyList()); private static final Logger log = LoggerFactory.getLogger(RepoIndex.class); private static final int ACCESS_MODIFIERS = @@ -35,10 +36,16 @@ public class RepoIndex { private final ClassNameTrie trie; private final List sourceRoots; + private final List rootPackages; - RepoIndex(ClassNameTrie trie, List sourceRoots) { + RepoIndex(ClassNameTrie trie, List sourceRoots, List rootPackages) { this.trie = trie; this.sourceRoots = sourceRoots; + this.rootPackages = rootPackages; + } + + public List getRootPackages() { + return rootPackages; } @Nullable @@ -66,6 +73,16 @@ public String getSourcePath(@Nonnull Class c) { } } + @Nullable + public String getSourcePath(String pathRelativeToSourceRoot) { + int sourceRootIdx = trie.apply(pathRelativeToSourceRoot); + if (sourceRootIdx >= 0) { + return sourceRoots.get(sourceRootIdx) + File.separator + pathRelativeToSourceRoot; + } else { + return null; + } + } + private SourceType detectSourceType(Class c) { Class[] interfaces = c.getInterfaces(); for (Class anInterface : interfaces) { @@ -81,6 +98,9 @@ private SourceType detectSourceType(Class c) { if ("kotlin.Metadata".equals(annotationType.getName())) { return SourceType.KOTLIN; } + if ("scala.reflect.ScalaSignature".equals(annotationType.getName())) { + return SourceType.SCALA; + } } // assuming Java @@ -169,10 +189,10 @@ public ByteBuffer serialize() { byte[] serializedTrie = byteArrayOutputStream.toByteArray(); totalLength = Integer.BYTES + serializedTrie.length; - int idx = 0; + int sourceRootIdx = 0; byte[][] sourceRootBytes = new byte[sourceRoots.size()][]; for (String sourceRoot : sourceRoots) { - sourceRootBytes[idx++] = sourceRoot.getBytes(StandardCharsets.UTF_8); + sourceRootBytes[sourceRootIdx++] = sourceRoot.getBytes(StandardCharsets.UTF_8); } totalLength += Integer.BYTES; @@ -180,6 +200,17 @@ public ByteBuffer serialize() { totalLength += Integer.BYTES + sourceRoot.length; } + int rootPackageIds = 0; + byte[][] rootPackageBytes = new byte[rootPackages.size()][]; + for (String rootPackage : rootPackages) { + rootPackageBytes[rootPackageIds++] = rootPackage.getBytes(StandardCharsets.UTF_8); + } + + totalLength += Integer.BYTES; + for (byte[] rootPackage : rootPackageBytes) { + totalLength += Integer.BYTES + rootPackage.length; + } + ByteBuffer buffer = ByteBuffer.allocate(totalLength); buffer.putInt(serializedTrie.length); buffer.put(serializedTrie); @@ -190,6 +221,12 @@ public ByteBuffer serialize() { buffer.put(sourceRoot); } + buffer.putInt(rootPackageBytes.length); + for (byte[] rootPackage : rootPackageBytes) { + buffer.putInt(rootPackage.length); + buffer.put(rootPackage); + } + buffer.flip(); return buffer; } @@ -216,6 +253,16 @@ public static RepoIndex deserialize(ByteBuffer buffer) { buffer.get(sourceRootBytes); sourceRoots.add(new String(sourceRootBytes, StandardCharsets.UTF_8)); } - return new RepoIndex(trie, sourceRoots); + + int rootPackagesCount = buffer.getInt(); + List rootPackages = new ArrayList<>(rootPackagesCount); + while (rootPackagesCount-- > 0) { + int rootPackageLength = buffer.getInt(); + byte[] rootPackageBytes = new byte[rootPackageLength]; + buffer.get(rootPackageBytes); + rootPackages.add(new 
String(rootPackageBytes, StandardCharsets.UTF_8)); + } + + return new RepoIndex(trie, sourceRoots, rootPackages); } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexBuilder.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexBuilder.java index 0904b5abb02..d0d0675e001 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexBuilder.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexBuilder.java @@ -21,7 +21,7 @@ public class RepoIndexBuilder implements RepoIndexProvider { private static final Logger log = LoggerFactory.getLogger(RepoIndexBuilder.class); private final String repoRoot; - private final SourceRootResolver sourceRootResolver; + private final PackageResolver packageResolver; private final FileSystem fileSystem; private final Object indexInitializationLock = new Object(); @@ -29,13 +29,13 @@ public class RepoIndexBuilder implements RepoIndexProvider { public RepoIndexBuilder(String repoRoot, FileSystem fileSystem) { this.repoRoot = repoRoot; - this.sourceRootResolver = new SourceRootResolverImpl(fileSystem); + this.packageResolver = new PackageResolverImpl(fileSystem); this.fileSystem = fileSystem; } - RepoIndexBuilder(String repoRoot, SourceRootResolver sourceRootResolver, FileSystem fileSystem) { + RepoIndexBuilder(String repoRoot, PackageResolver packageResolver, FileSystem fileSystem) { this.repoRoot = repoRoot; - this.sourceRootResolver = sourceRootResolver; + this.packageResolver = packageResolver; this.fileSystem = fileSystem; } @@ -60,7 +60,7 @@ private RepoIndex doGetIndex() { Path repoRootPath = fileSystem.getPath(repoRoot); RepoIndexingFileVisitor repoIndexingFileVisitor = - new RepoIndexingFileVisitor(sourceRootResolver, repoRootPath); + new RepoIndexingFileVisitor(packageResolver, repoRootPath); long startTime = System.currentTimeMillis(); try { @@ -75,31 +75,34 @@ private RepoIndex doGetIndex() { long duration = System.currentTimeMillis() - startTime; RepoIndexingStats stats = repoIndexingFileVisitor.indexingStats; + RepoIndex index = repoIndexingFileVisitor.getIndex(); log.info( - "Indexing took {} ms. Files visited: {}, source files visited: {}, source roots found: {}", + "Indexing took {} ms. 
Files visited: {}, source files visited: {}, source roots found: {}, root packages found: {}", duration, stats.filesVisited, stats.sourceFilesVisited, - stats.sourceRoots); - - return repoIndexingFileVisitor.getIndex(); + repoIndexingFileVisitor.sourceRoots.size(), + index.getRootPackages()); + return index; } private static final class RepoIndexingFileVisitor implements FileVisitor { private static final Logger log = LoggerFactory.getLogger(RepoIndexingFileVisitor.class); - private final SourceRootResolver sourceRootResolver; + private final PackageResolver packageResolver; private final ClassNameTrie.Builder trieBuilder; private final LinkedHashSet sourceRoots; + private final PackageTree packageTree; private final RepoIndexingStats indexingStats; private final Path repoRoot; - private RepoIndexingFileVisitor(SourceRootResolver sourceRootResolver, Path repoRoot) { - this.sourceRootResolver = sourceRootResolver; + private RepoIndexingFileVisitor(PackageResolver packageResolver, Path repoRoot) { + this.packageResolver = packageResolver; this.repoRoot = repoRoot; trieBuilder = new ClassNameTrie.Builder(); sourceRoots = new LinkedHashSet<>(); + packageTree = new PackageTree(); indexingStats = new RepoIndexingStats(); } @@ -118,9 +121,11 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { if (sourceType != null) { indexingStats.sourceFilesVisited++; - Path currentSourceRoot = sourceRootResolver.getSourceRoot(file); + Path packagePath = packageResolver.getPackage(file); + packageTree.add(packagePath); + + Path currentSourceRoot = getSourceRoot(file, packagePath); sourceRoots.add(repoRoot.relativize(currentSourceRoot).toString()); - indexingStats.sourceRoots++; Path relativePath = currentSourceRoot.relativize(file); String classNameWithExtension = relativePath.toString().replace(File.separatorChar, '.'); @@ -134,6 +139,14 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { return FileVisitResult.CONTINUE; } + private Path getSourceRoot(Path file, Path packagePath) { + Path folder = file.getParent(); + // remove package path suffix from folder path to get source root + return folder + .getRoot() + .resolve(folder.subpath(0, folder.getNameCount() - packagePath.getNameCount())); + } + @Override public FileVisitResult visitFileFailed(Path file, IOException exc) { if (exc != null) { @@ -151,13 +164,13 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) { } public RepoIndex getIndex() { - return new RepoIndex(trieBuilder.buildTrie(), new ArrayList<>(sourceRoots)); + return new RepoIndex( + trieBuilder.buildTrie(), new ArrayList<>(sourceRoots), packageTree.asList()); } } private static final class RepoIndexingStats { int filesVisited; int sourceFilesVisited; - int sourceRoots; } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexFetcher.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexFetcher.java index e799ef1d488..98bd651e4f4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexFetcher.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexFetcher.java @@ -3,7 +3,6 @@ import datadog.trace.civisibility.ipc.RepoIndexRequest; import datadog.trace.civisibility.ipc.RepoIndexResponse; import datadog.trace.civisibility.ipc.SignalClient; -import java.util.function.Supplier; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -11,11 +10,11 @@ public class RepoIndexFetcher implements RepoIndexProvider { private static final Logger log = LoggerFactory.getLogger(RepoIndexFetcher.class); - private final Supplier signalClientFactory; + private final SignalClient.Factory signalClientFactory; private final Object indexInitializationLock = new Object(); private volatile RepoIndex index; - public RepoIndexFetcher(Supplier signalClientFactory) { + public RepoIndexFetcher(SignalClient.Factory signalClientFactory) { this.signalClientFactory = signalClientFactory; } @@ -32,7 +31,7 @@ public RepoIndex getIndex() { } private RepoIndex doGetIndex() { - try (SignalClient signalClient = signalClientFactory.get()) { + try (SignalClient signalClient = signalClientFactory.create()) { RepoIndexResponse response = (RepoIndexResponse) signalClient.send(RepoIndexRequest.INSTANCE); return response.getIndex(); } catch (Exception e) { diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolver.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolver.java index 845bd76120c..5ccea333d79 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolver.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolver.java @@ -1,7 +1,7 @@ package datadog.trace.civisibility.source.index; import datadog.trace.api.Config; -import datadog.trace.api.civisibility.source.SourcePathResolver; +import datadog.trace.civisibility.source.SourcePathResolver; import java.net.URL; import java.nio.file.FileSystem; import java.security.CodeSource; @@ -20,9 +20,9 @@ public RepoIndexSourcePathResolver(String repoRoot, RepoIndexProvider indexProvi } RepoIndexSourcePathResolver( - String repoRoot, SourceRootResolver sourceRootResolver, FileSystem fileSystem) { + String repoRoot, PackageResolver packageResolver, FileSystem fileSystem) { this.repoRoot = repoRoot; - this.indexProvider = new RepoIndexBuilder(repoRoot, sourceRootResolver, fileSystem); + this.indexProvider = new RepoIndexBuilder(repoRoot, packageResolver, fileSystem); } @Nullable diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceType.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceType.java index ea28658745a..792dba71118 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceType.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/source/index/SourceType.java @@ -3,7 +3,10 @@ enum SourceType { JAVA(".java"), GROOVY(".groovy"), - KOTLIN(".kt"); + KOTLIN(".kt"), + SCALA(".scala"); + + private static final SourceType[] UNIVERSE = SourceType.values(); private final String extension; @@ -16,7 +19,7 @@ public String getExtension() { } static SourceType getByFileName(String fileName) { - for (SourceType sourceType : values()) { + for (SourceType sourceType : UNIVERSE) { if (fileName.endsWith(sourceType.extension)) { return sourceType; } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/CIUtils.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/CIUtils.java deleted file mode 100644 index fcdc37cb1ee..00000000000 --- 
a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/CIUtils.java +++ /dev/null @@ -1,38 +0,0 @@ -package datadog.trace.civisibility.utils; - -import java.nio.file.Files; -import java.nio.file.Path; - -public final class CIUtils { - - private CIUtils() {} - - /** - * Search the parent path that contains the target file. If the current path does not have the - * target file, the method continues with the parent path. If the path is not found, it returns - * null. - * - * @param current - * @param target - * @return the parent path that contains the target file. - */ - public static Path findParentPathBackwards( - final Path current, final String target, final boolean isTargetDirectory) { - if (current == null || target == null || target.isEmpty()) { - return null; - } - - final Path targetPath = current.resolve(target); - if (Files.exists(targetPath)) { - if (isTargetDirectory && Files.isDirectory(targetPath)) { - return current; - } else if (!isTargetDirectory && Files.isRegularFile(targetPath)) { - return current; - } else { - return findParentPathBackwards(current.getParent(), target, isTargetDirectory); - } - } else { - return findParentPathBackwards(current.getParent(), target, isTargetDirectory); - } - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/FileUtils.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/FileUtils.java index efa29999f24..9c4cffdfd4e 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/FileUtils.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/FileUtils.java @@ -1,5 +1,6 @@ package datadog.trace.civisibility.utils; +import de.thetaphi.forbiddenapis.SuppressForbidden; import java.io.IOException; import java.nio.file.FileVisitResult; import java.nio.file.Files; @@ -7,6 +8,7 @@ import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; +@SuppressForbidden public abstract class FileUtils { private FileUtils() {} @@ -29,4 +31,47 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOEx } }); } + + /** + * Searches for the closest parent path that contains the target file. If the current path does + * not contain the target, the search continues with its parent path. If no such path is found, + * the method returns null. + * + * @param current the path to start searching from + * @param target the name of the target file or directory + * @return the closest parent path that contains the target, or null if none is found. + */ + public static Path findParentPathBackwards( + final Path current, final String target, final boolean isTargetDirectory) { + if (current == null || target == null || target.isEmpty()) { + return null; + } + + final Path targetPath = current.resolve(target); + if (Files.exists(targetPath)) { + if (isTargetDirectory && Files.isDirectory(targetPath)) { + return current; + } else if (!isTargetDirectory && Files.isRegularFile(targetPath)) { + return current; + } else { + return findParentPathBackwards(current.getParent(), target, isTargetDirectory); + } + } else { + return findParentPathBackwards(current.getParent(), target, isTargetDirectory); + } + } + + public static String expandTilde(final String path) { + if (path == null || !path.startsWith("~")) { + return path; + } + + if (!path.equals("~") && !path.startsWith("~/")) { + // Home dir expansion is not supported for other users. + // Returning path without modifications.
+ return path; + } + + return path.replaceFirst("^~", System.getProperty("user.home")); + } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/IOUtils.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/IOUtils.java index f2a2735bb0a..320a61ac5e2 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/IOUtils.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/IOUtils.java @@ -1,5 +1,6 @@ package datadog.trace.civisibility.utils; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -22,11 +23,11 @@ public abstract class IOUtils { private IOUtils() {} - public static String readFully(InputStream input) throws IOException { + public static @NonNull String readFully(InputStream input) throws IOException { return readFully(input, Charset.defaultCharset()); } - public static String readFully(InputStream input, Charset charset) throws IOException { + public static @NonNull String readFully(InputStream input, Charset charset) throws IOException { ByteArrayOutputStream output = new ByteArrayOutputStream(); readFully(input, output); return new String(output.toByteArray(), charset); @@ -40,17 +41,17 @@ public static void readFully(InputStream input, OutputStream output) throws IOEx } } - public static List readLines(final InputStream input) throws IOException { + public static @NonNull List readLines(final InputStream input) throws IOException { return readLines(input, Charset.defaultCharset()); } - public static List readLines(final InputStream input, final Charset charset) + public static @NonNull List readLines(final InputStream input, final Charset charset) throws IOException { final InputStreamReader reader = new InputStreamReader(input, charset); return readLines(reader); } - public static List readLines(final Reader input) throws IOException { + public static @NonNull List readLines(final Reader input) throws IOException { final BufferedReader reader = new BufferedReader(input, DEFAULT_BUFFER_SIZE); final List list = new ArrayList<>(); String line = reader.readLine(); diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/PathUtils.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/PathUtils.java deleted file mode 100644 index 30143083fbe..00000000000 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/PathUtils.java +++ /dev/null @@ -1,20 +0,0 @@ -package datadog.trace.civisibility.utils; - -import de.thetaphi.forbiddenapis.SuppressForbidden; - -@SuppressForbidden -public class PathUtils { - public static String expandTilde(final String path) { - if (path == null || !path.startsWith("~")) { - return path; - } - - if (!path.equals("~") && !path.startsWith("~/")) { - // Home dir expansion is not supported for other user. - // Returning path without modifications. 
- return path; - } - - return path.replaceFirst("^~", System.getProperty("user.home")); - } -} diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java index e4e7af8b74a..640752096d2 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java @@ -115,7 +115,7 @@ private T executeCommand( if (p.waitFor(timeoutMillis, TimeUnit.MILLISECONDS)) { int exitValue = p.exitValue(); if (exitValue != 0) { - throw new IOException( + throw new ShellCommandFailedException( "Command '" + Strings.join(" ", command) + "' failed with exit code " @@ -193,4 +193,10 @@ public interface OutputParser { T parse(InputStream inputStream) throws IOException; } + + public static final class ShellCommandFailedException extends IOException { + public ShellCommandFailedException(String message) { + super(message); + } + } } diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/SpanUtils.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/SpanUtils.java new file mode 100644 index 00000000000..479ac19a1cb --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/SpanUtils.java @@ -0,0 +1,88 @@ +package datadog.trace.civisibility.utils; + +import datadog.trace.api.civisibility.CIConstants; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import java.util.ArrayList; +import java.util.Collection; +import java.util.function.Consumer; + +public class SpanUtils { + public static final Consumer DO_NOT_PROPAGATE_CI_VISIBILITY_TAGS = span -> {}; + + public static Consumer propagateCiVisibilityTagsTo(AgentSpan parentSpan) { + return childSpan -> propagateCiVisibilityTags(parentSpan, childSpan); + } + + public static void propagateCiVisibilityTags(AgentSpan parentSpan, AgentSpan childSpan) { + mergeTag(parentSpan, childSpan, Tags.TEST_FRAMEWORK); + mergeTag(parentSpan, childSpan, Tags.TEST_FRAMEWORK_VERSION); + propagateStatus(parentSpan, childSpan); + } + + public static void mergeTag(AgentSpan parentSpan, AgentSpan childSpan, String tagName) { + mergeTag(parentSpan, tagName, childSpan.getTag(tagName)); + } + + public static void mergeTag(AgentSpan span, String tagName, Object tagValue) { + if (tagValue == null) { + return; + } + + Object existingValue = span.getTag(tagName); + if (existingValue == null) { + span.setTag(tagName, tagValue); + return; + } + + if (existingValue.equals(tagValue)) { + return; + } + + Collection updatedValue = new ArrayList<>(); + if (existingValue instanceof Collection) { + updatedValue.addAll((Collection) existingValue); + } else { + updatedValue.add(existingValue); + } + + if (tagValue instanceof Collection) { + for (Object value : (Collection) tagValue) { + if (!updatedValue.contains(value)) { + updatedValue.add(value); + } + } + } else { + if (!updatedValue.contains(tagValue)) { + updatedValue.add(tagValue); + } + } + span.setTag(tagName, updatedValue); + } + + private static void propagateStatus(AgentSpan parentSpan, AgentSpan childSpan) { + String childStatus = (String) childSpan.getTag(Tags.TEST_STATUS); + if (childStatus == null) { + return; + } + + String parentStatus 
= (String) parentSpan.getTag(Tags.TEST_STATUS); + switch (childStatus) { + case CIConstants.TEST_PASS: + if (parentStatus == null || CIConstants.TEST_SKIP.equals(parentStatus)) { + parentSpan.setTag(Tags.TEST_STATUS, CIConstants.TEST_PASS); + } + break; + case CIConstants.TEST_FAIL: + parentSpan.setTag(Tags.TEST_STATUS, CIConstants.TEST_FAIL); + break; + case CIConstants.TEST_SKIP: + if (parentStatus == null) { + parentSpan.setTag(Tags.TEST_STATUS, CIConstants.TEST_SKIP); + } + break; + default: + break; + } + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/DDTestImplTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/DDTestImplTest.groovy index f29170cf9be..a372f5c42df 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/DDTestImplTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/DDTestImplTest.groovy @@ -1,18 +1,16 @@ package datadog.trace.civisibility - import datadog.trace.agent.test.asserts.ListWriterAssert import datadog.trace.agent.tooling.TracerInstaller import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.IdGenerationStrategy -import datadog.trace.api.civisibility.InstrumentationBridge import datadog.trace.bootstrap.instrumentation.api.AgentTracer import datadog.trace.civisibility.codeowners.CodeownersImpl -import datadog.trace.civisibility.context.ParentProcessTestContext import datadog.trace.civisibility.coverage.NoopCoverageProbeStore import datadog.trace.civisibility.decorator.TestDecoratorImpl import datadog.trace.civisibility.source.MethodLinesResolver +import datadog.trace.civisibility.utils.SpanUtils import datadog.trace.common.writer.ListWriter import datadog.trace.core.CoreTracer import datadog.trace.test.util.DDSpecification @@ -42,8 +40,6 @@ class DDTestImplTest extends DDSpecification { def agentSpan = callRealMethod() agentSpan } - - InstrumentationBridge.registerCoverageProbeStoreFactory(new NoopCoverageProbeStore.NoopCoverageProbeStoreFactory()) } void cleanupSpec() { @@ -109,30 +105,29 @@ class DDTestImplTest extends DDSpecification { def moduleId = 456 def suiteId = 789 - def moduleContext = new ParentProcessTestContext(sessionId, moduleId) - def suiteContext = new ParentProcessTestContext(moduleId, suiteId) - def config = Config.get() def testDecorator = new TestDecoratorImpl("component", [:]) def sourcePathResolver = { it -> null } def methodLinesResolver = { it -> MethodLinesResolver.MethodLines.EMPTY } def codeowners = CodeownersImpl.EMPTY - + def coverageProbeStoreFactory = new NoopCoverageProbeStore.NoopCoverageProbeStoreFactory() new DDTestImpl( - suiteContext, - moduleContext, + sessionId, + moduleId, + suiteId, "moduleName", "suiteName", "testName", null, null, null, - null, config, testDecorator, sourcePathResolver, methodLinesResolver, - codeowners + codeowners, + coverageProbeStoreFactory, + SpanUtils.DO_NOT_PROPAGATE_CI_VISIBILITY_TAGS ) } diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/AwsCodePipelineInfoTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/AwsCodePipelineInfoTest.groovy new file mode 100644 index 00000000000..bff684f4a00 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/AwsCodePipelineInfoTest.groovy @@ -0,0 +1,30 @@ +package datadog.trace.civisibility.ci + +import java.nio.file.Path + +class 
AwsCodePipelineInfoTest extends CITagsProviderTest { + + @Override + String getProviderName() { + return AwsCodePipelineInfo.AWS_CODEPIPELINE_PROVIDER_NAME + } + + @Override + Map buildRemoteGitInfoEmpty() { + final Map map = new HashMap<>() + map.put(AwsCodePipelineInfo.AWS_CODEPIPELINE, "codepipeline") + return map + } + + @Override + Map buildRemoteGitInfoMismatchLocalGit() { + final Map map = new HashMap<>() + map.put(AwsCodePipelineInfo.AWS_CODEPIPELINE, "codepipeline") + return map + } + + @Override + Path getWorkspacePath() { + null + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/CITagsProviderTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/CITagsProviderTest.groovy index 776c02d9707..616f7ea18db 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/CITagsProviderTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/CITagsProviderTest.groovy @@ -7,6 +7,7 @@ import datadog.trace.api.git.UserSuppliedGitInfoBuilder import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.civisibility.git.CILocalGitInfoBuilder import datadog.trace.civisibility.git.CIProviderGitInfoBuilder +import datadog.trace.civisibility.git.tree.GitClient import org.junit.Rule import org.junit.contrib.java.lang.system.EnvironmentVariables import org.junit.contrib.java.lang.system.RestoreSystemProperties @@ -167,10 +168,13 @@ abstract class CITagsProviderTest extends Specification { } CITagsProvider ciTagsProvider() { + GitClient.Factory gitClientFactory = Stub(GitClient.Factory) + gitClientFactory.create(_) >> Stub(GitClient) + GitInfoProvider gitInfoProvider = new GitInfoProvider() gitInfoProvider.registerGitInfoBuilder(new UserSuppliedGitInfoBuilder()) gitInfoProvider.registerGitInfoBuilder(new CIProviderGitInfoBuilder()) - gitInfoProvider.registerGitInfoBuilder(new CILocalGitInfoBuilder(GIT_FOLDER_FOR_TESTS)) + gitInfoProvider.registerGitInfoBuilder(new CILocalGitInfoBuilder(gitClientFactory, GIT_FOLDER_FOR_TESTS)) return new CITagsProvider(gitInfoProvider) } diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/UnknownCIInfoTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/UnknownCIInfoTest.groovy index 761e42176c2..333f554c8d4 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/UnknownCIInfoTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ci/UnknownCIInfoTest.groovy @@ -6,12 +6,13 @@ import datadog.trace.api.git.UserSuppliedGitInfoBuilder import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.civisibility.git.CILocalGitInfoBuilder import datadog.trace.civisibility.git.CIProviderGitInfoBuilder +import datadog.trace.civisibility.git.tree.GitClient import java.nio.file.Paths class UnknownCIInfoTest extends CITagsProviderTest { - def workspaceForTests = Paths.get(getClass().getClassLoader().getResource(CITagsProviderTest.CI_WORKSPACE_PATH_FOR_TESTS).toURI()) + def workspaceForTests = Paths.get(getClass().getClassLoader().getResource(CI_WORKSPACE_PATH_FOR_TESTS).toURI()) @Override String getProviderName() { @@ -55,10 +56,13 @@ class UnknownCIInfoTest extends CITagsProviderTest { def "test workspace is null if target folder does not exist"() { when: + def gitClientFactory = Stub(GitClient.Factory) + gitClientFactory.create(_) >> 
Stub(GitClient) + GitInfoProvider gitInfoProvider = new GitInfoProvider() gitInfoProvider.registerGitInfoBuilder(new UserSuppliedGitInfoBuilder()) gitInfoProvider.registerGitInfoBuilder(new CIProviderGitInfoBuilder()) - gitInfoProvider.registerGitInfoBuilder(new CILocalGitInfoBuilder("this-target-folder-does-not-exist")) + gitInfoProvider.registerGitInfoBuilder(new CILocalGitInfoBuilder(gitClientFactory, "this-target-folder-does-not-exist")) CIProviderInfoFactory ciProviderInfoFactory = new CIProviderInfoFactory(Config.get(), "this-target-folder-does-not-exist") def ciProviderInfo = ciProviderInfoFactory.createCIProviderInfo(workspaceForTests) diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/config/JvmInfoFactoryTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/config/JvmInfoFactoryTest.groovy index 0bfa76e175e..db6a991d2df 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/config/JvmInfoFactoryTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/config/JvmInfoFactoryTest.groovy @@ -1,6 +1,6 @@ package datadog.trace.civisibility.config -import datadog.trace.api.civisibility.config.JvmInfo + import datadog.trace.util.ProcessUtils import spock.lang.Specification diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/decorator/TestDecoratorImplTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/decorator/TestDecoratorImplTest.groovy index f343dcf44ac..48a15b6bde0 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/decorator/TestDecoratorImplTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/decorator/TestDecoratorImplTest.groovy @@ -40,14 +40,6 @@ class TestDecoratorImplTest extends Specification { serviceName << ["test-service", "other-service", null] } - def "test beforeFinish"() { - when: - newDecorator().beforeFinish(span) - - then: - 0 * _ - } - static newDecorator() { new TestDecoratorImpl("test-component", ["ci-tag-1": "value", "ci-tag-2": "another value"]) } diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/CILocalGitInfoBuilderTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/CILocalGitInfoBuilderTest.groovy index d484f28bfdc..e040aa33af8 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/CILocalGitInfoBuilderTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/CILocalGitInfoBuilderTest.groovy @@ -1,6 +1,6 @@ package datadog.trace.civisibility.git - +import datadog.trace.civisibility.git.tree.GitClient import spock.lang.Specification import java.nio.file.Paths @@ -9,7 +9,10 @@ class CILocalGitInfoBuilderTest extends Specification { def "returns empty git info when repository path is not specified"() { setup: - def builder = new CILocalGitInfoBuilder(".git") + def gitClientFactory = Stub(GitClient.Factory) + gitClientFactory.create(_) >> Stub(GitClient) + + def builder = new CILocalGitInfoBuilder(gitClientFactory,".git") when: def gitInfo = builder.build(null) @@ -21,7 +24,10 @@ class CILocalGitInfoBuilderTest extends Specification { def "parses git info"() { setup: - def builder = new CILocalGitInfoBuilder("git_folder_for_tests") + def gitClientFactory = Stub(GitClient.Factory) + 
gitClientFactory.create(_) >> Stub(GitClient) + + def builder = new CILocalGitInfoBuilder(gitClientFactory, "git_folder_for_tests") def workspace = resolve("ci/ci_workspace_for_tests") when: diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/GitClientGitInfoBuilderTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/GitClientGitInfoBuilderTest.groovy new file mode 100644 index 00000000000..d9b1cf143fa --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/GitClientGitInfoBuilderTest.groovy @@ -0,0 +1,63 @@ +package datadog.trace.civisibility.git + +import datadog.trace.api.Config +import datadog.trace.civisibility.git.tree.GitClient +import datadog.trace.civisibility.utils.IOUtils +import spock.lang.Specification +import spock.lang.TempDir + +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths + +class GitClientGitInfoBuilderTest extends Specification { + + private static final int GIT_COMMAND_TIMEOUT_MILLIS = 10_000 + + private static final String GIT_FOLDER = ".git" + + @TempDir + private Path tempDir + + def "test git client info builder"() { + given: + givenGitRepo() + + def config = Stub(Config) + config.getCiVisibilityGitRemoteName() >> "origin" + config.getCiVisibilityGitCommandTimeoutMillis() >> GIT_COMMAND_TIMEOUT_MILLIS + + def gitClientFactory = new GitClient.Factory(config) + def infoBuilder = new GitClientGitInfoBuilder(config, gitClientFactory) + + when: + def gitInfo = infoBuilder.build(tempDir.toAbsolutePath().toString()) + + then: + gitInfo.repositoryURL == "git@github.com:DataDog/dd-trace-dotnet.git" + gitInfo.branch == "master" + gitInfo.tag == null + gitInfo.commit.sha == "5b6f3a6dab5972d73a56dff737bd08d995255c08" + gitInfo.commit.author.name == "Tony Redondo" + gitInfo.commit.author.email == "tony.redondo@datadoghq.com" + gitInfo.commit.author.iso8601Date == "2021-02-26T19:32:13+01:00" + gitInfo.commit.committer.name == "GitHub" + gitInfo.commit.committer.email == "noreply@github.com" + gitInfo.commit.committer.iso8601Date == "2021-02-26T19:32:13+01:00" + gitInfo.commit.fullMessage == "Adding Git information to test spans (#1242)\n\n" + + "* Initial basic GitInfo implementation.\r\n\r\n" + + "* Adds Author, Committer and Message git parser.\r\n\r\n" + + "* Changes based on the review." 
+ } + + private void givenGitRepo() { + givenGitRepo("ci/git/with_pack/git") + } + + private void givenGitRepo(String resourceName) { + def gitFolder = Paths.get(getClass().getClassLoader().getResource(resourceName).toURI()) + def tempGitFolder = tempDir.resolve(GIT_FOLDER) + Files.createDirectories(tempGitFolder) + IOUtils.copyFolder(gitFolder, tempGitFolder) + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitClientTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitClientTest.groovy index 5a41e389a7f..68f29205469 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitClientTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitClientTest.groovy @@ -14,6 +14,8 @@ class GitClientTest extends Specification { private static final int GIT_COMMAND_TIMEOUT_MILLIS = 10_000 + private static final String GIT_FOLDER = ".git" + @TempDir private Path tempDir @@ -41,7 +43,19 @@ class GitClientTest extends Specification { shallow } - def "test unshallow"() { + def "test get upstream branch SHA"() { + given: + givenGitRepo("ci/git/shallow/git") + + when: + def gitClient = givenGitClient() + def upstreamBranch = gitClient.getUpstreamBranchSha() + + then: + upstreamBranch == "98b944cc44f18bfb78e3021de2999cdcda8efdf6" + } + + def "test unshallow: #remoteSha"() { given: givenGitRepo("ci/git/shallow/git") @@ -55,13 +69,28 @@ class GitClientTest extends Specification { commits.size() == 1 when: - gitClient.unshallow() + gitClient.unshallow(remoteSha) shallow = gitClient.isShallow() commits = gitClient.getLatestCommits() then: !shallow commits.size() == 10 + + where: + remoteSha << [GitClient.HEAD, null] + } + + def "test get git folder"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def folder = gitClient.getGitFolder() + + then: + folder == tempDir.resolve(GIT_FOLDER).toRealPath().toString() } def "test get remote url"() { @@ -76,6 +105,129 @@ class GitClientTest extends Specification { remoteUrl == "git@github.com:DataDog/dd-trace-dotnet.git" } + def "test get current branch"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def branch = gitClient.getCurrentBranch() + + then: + branch == "master" + } + + def "test get tags"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def tags = gitClient.getTags(GitClient.HEAD) + + then: + tags.empty + } + + def "test get sha"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def sha = gitClient.getSha(GitClient.HEAD) + + then: + sha == "5b6f3a6dab5972d73a56dff737bd08d995255c08" + } + + def "test get full message"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def message = gitClient.getFullMessage(GitClient.HEAD) + + then: + message == "Adding Git information to test spans (#1242)\n\n" + + "* Initial basic GitInfo implementation.\r\n\r\n" + + "* Adds Author, Committer and Message git parser.\r\n\r\n" + + "* Changes based on the review." 
+ } + + def "test get author name"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def authorName = gitClient.getAuthorName(GitClient.HEAD) + + then: + authorName == "Tony Redondo" + } + + def "test get author email"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def authorEmail = gitClient.getAuthorEmail(GitClient.HEAD) + + then: + authorEmail == "tony.redondo@datadoghq.com" + } + + def "test get author date"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def authorDate = gitClient.getAuthorDate(GitClient.HEAD) + + then: + authorDate == "2021-02-26T19:32:13+01:00" + } + + def "test get committer name"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def authorName = gitClient.getCommitterName(GitClient.HEAD) + + then: + authorName == "GitHub" + } + + def "test get committer email"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def authorEmail = gitClient.getCommitterEmail(GitClient.HEAD) + + then: + authorEmail == "noreply@github.com" + } + + def "test get committer date"() { + given: + givenGitRepo() + + when: + def gitClient = givenGitClient() + def authorDate = gitClient.getCommitterDate(GitClient.HEAD) + + then: + authorDate == "2021-02-26T19:32:13+01:00" + } + def "test get latest commits"() { given: givenGitRepo() @@ -154,7 +306,7 @@ class GitClientTest extends Specification { private void givenGitRepo(String resourceName) { def gitFolder = Paths.get(getClass().getClassLoader().getResource(resourceName).toURI()) - def tempGitFolder = tempDir.resolve(".git") + def tempGitFolder = tempDir.resolve(GIT_FOLDER) Files.createDirectories(tempGitFolder) IOUtils.copyFolder(gitFolder, tempGitFolder) } diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitDataUploaderImplTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitDataUploaderImplTest.groovy index ef664bfeca7..34685918a74 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitDataUploaderImplTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/git/tree/GitDataUploaderImplTest.groovy @@ -1,6 +1,8 @@ package datadog.trace.civisibility.git.tree import datadog.trace.api.Config +import datadog.trace.api.git.GitInfo +import datadog.trace.api.git.GitInfoProvider import datadog.trace.civisibility.utils.IOUtils import org.hamcrest.Description import org.hamcrest.Matcher @@ -24,19 +26,25 @@ class GitDataUploaderImplTest extends Specification { given: givenGitRepo() + def repoRoot = tempDir.toString() + def repoUrl = "" + def config = Stub(Config) { getCiVisibilityGitUploadTimeoutMillis() >> 15_000 } def api = Mock(GitDataApi) - def gitClient = new GitClient(tempDir.toString(), "25 years ago", 3, TIMEOUT_MILLIS) - def uploader = new GitDataUploaderImpl(config, api, gitClient, "origin") + def gitInfoProvider = Stub(GitInfoProvider) + gitInfoProvider.getGitInfo(repoRoot) >> new GitInfo(repoUrl, null, null, null) + + def gitClient = new GitClient(repoRoot, "25 years ago", 3, TIMEOUT_MILLIS) + def uploader = new GitDataUploaderImpl(config, api, gitClient, gitInfoProvider, repoRoot, "origin") when: def future = uploader.startOrObserveGitDataUpload() future.get(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS) then: - 1 * api.searchCommits("git@github.com:DataDog/dd-trace-dotnet.git", [ + 1 * api.searchCommits(repoUrl, [ 
"5b6f3a6dab5972d73a56dff737bd08d995255c08", "98cd7c8e9cf71e02dc28bd9b13928bee0f85b74c", "31ca182c0474f6265e660498c4fbcf775e23bba0", @@ -46,7 +54,7 @@ class GitDataUploaderImplTest extends Specification { ] 1 * api.uploadPackFile( - "git@github.com:DataDog/dd-trace-dotnet.git", + repoUrl, "5b6f3a6dab5972d73a56dff737bd08d995255c08", fileWithContents(Paths.get(getClass().getClassLoader().getResource("ci/git/uploadedPackFile.pack").toURI()))) 0 * _ diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ChannelContextTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ChannelContextTest.groovy index 0f5d508380a..5281d6abae7 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ChannelContextTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ChannelContextTest.groovy @@ -1,5 +1,6 @@ package datadog.trace.civisibility.ipc +import datadog.trace.api.Platform import spock.lang.IgnoreIf import spock.lang.Specification @@ -7,8 +8,8 @@ import java.nio.ByteBuffer import java.nio.channels.ByteChannel import java.util.concurrent.ThreadLocalRandom -@IgnoreIf(reason = "JVM crash with IBM JDK", value = { - System.getProperty("java.vendor").contains("IBM") && System.getProperty("java.version").contains("1.8.") +@IgnoreIf(reason = "JVM crash with OpenJ9", value = { + Platform.isJ9() }) class ChannelContextTest extends Specification { diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ModuleExecutionResultTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ModuleExecutionResultTest.groovy index 803eee8d37e..30acccbd18d 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ModuleExecutionResultTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/ModuleExecutionResultTest.groovy @@ -14,13 +14,23 @@ class ModuleExecutionResultTest extends Specification { then: deserialized == signal + + where: signal << [ - new ModuleExecutionResult(12345, 67890, false, false, 0, null, null), - new ModuleExecutionResult(12345, 67890, true, false, 1, "junit", "4.13.2"), - new ModuleExecutionResult(12345, 67890, false, true, 2, null, "4.13.2"), - new ModuleExecutionResult(12345, 67890, false, false, 3, "junit", null), - new ModuleExecutionResult(12345, 67890, true, true, Integer.MAX_VALUE, "junit", "4.13.2") + new ModuleExecutionResult(12345, 67890, false, false, 0, Collections.emptyList(), null), + new ModuleExecutionResult(12345, 67890, true, false, 1, Collections.singletonList(new TestFramework("junit", "4.13.2")), new byte[] { + 1, 2, 3 + }), + new ModuleExecutionResult(12345, 67890, false, true, 2, Arrays.asList(new TestFramework("junit", "4.13.2"), new TestFramework("junit", "5.9.2")), new byte[] { + 1, 2, 3 + }), + new ModuleExecutionResult(12345, 67890, false, false, 3, Arrays.asList(new TestFramework("junit", null), new TestFramework("junit", "5.9.2")), new byte[] { + 1, 2, 3 + }), + new ModuleExecutionResult(12345, 67890, true, true, Integer.MAX_VALUE, Arrays.asList(new TestFramework("junit", "4.13.2"), new TestFramework(null, "5.9.2")), new byte[] { + 1, 2, 3 + }) ] } diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SignalServerTest.groovy 
b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SignalServerTest.groovy index 8842629b460..6de136b75a4 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SignalServerTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SignalServerTest.groovy @@ -9,7 +9,9 @@ class SignalServerTest extends Specification { def "test message send and receive"() { given: def signalProcessed = new AtomicBoolean(false) - def signal = new ModuleExecutionResult(123, 456, true, true, 1, "junit", "4.13.2") + def signal = new ModuleExecutionResult(123, 456, true, true, 1, Collections.singletonList(new TestFramework("junit", "4.13.2")), new byte[] { + 1, 2, 3 + }) def server = new SignalServer() def received = new ArrayList() @@ -38,8 +40,10 @@ class SignalServerTest extends Specification { def "test multiple messages send and receive"() { given: - def signalA = new ModuleExecutionResult(123, 456, false, false, 0, "junit", "4.13.2") - def signalB = new ModuleExecutionResult(234, 567, true, true, 1, "junit", "4.13.2") + def signalA = new ModuleExecutionResult(123, 456, false, false, 0, Collections.singletonList(new TestFramework("junit", "4.13.2")), new byte[] { + 1, 2, 3 + }) + def signalB = new ModuleExecutionResult(234, 567, true, true, 1, Collections.singletonList(new TestFramework("junit", "4.13.2")), null) def server = new SignalServer() def received = new ArrayList() @@ -67,8 +71,10 @@ class SignalServerTest extends Specification { def "test multiple clients send and receive"() { given: - def signalA = new ModuleExecutionResult(123, 456, true, false, 1, "junit", "4.13.2") - def signalB = new ModuleExecutionResult(234, 567, false, true, 0, "junit", "4.13.2") + def signalA = new ModuleExecutionResult(123, 456, true, false, 1, Collections.singletonList(new TestFramework("junit", "4.13.2")), new byte[] { + 1, 2, 3 + }) + def signalB = new ModuleExecutionResult(234, 567, false, true, 0, Collections.singletonList(new TestFramework("junit", "4.13.2")), null) def server = new SignalServer() def received = new ArrayList() @@ -115,7 +121,9 @@ class SignalServerTest extends Specification { when: def address = server.getAddress() try (def client = new SignalClient(address, clientTimeoutMillis)) { - client.send(new ModuleExecutionResult(123, 456, false, false, 0, "junit", "4.13.2")) + client.send(new ModuleExecutionResult(123, 456, false, false, 0, Collections.singletonList(new TestFramework("junit", "4.13.2")), new byte[] { + 1, 2, 3 + })) } then: @@ -127,7 +135,9 @@ class SignalServerTest extends Specification { def "test error response receipt"() { given: - def signal = new ModuleExecutionResult(123, 456, true, true, 1, "junit", "4.13.2") + def signal = new ModuleExecutionResult(123, 456, true, true, 1, Collections.singletonList(new TestFramework("junit", "4.13.2")), new byte[] { + 1, 2, 3 + }) def server = new SignalServer() def errorResponse = new ErrorResponse("An error occurred while processing the signal") diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SkippableTestsRequestTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SkippableTestsRequestTest.groovy index 4fd81e75e2f..0f2fb01a3c2 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SkippableTestsRequestTest.groovy +++ 
b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/ipc/SkippableTestsRequestTest.groovy @@ -1,6 +1,6 @@ package datadog.trace.civisibility.ipc -import datadog.trace.api.civisibility.config.JvmInfo +import datadog.trace.civisibility.config.JvmInfo import spock.lang.Specification import java.nio.file.Paths diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortMethodLinesResolverTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortMethodLinesResolverTest.groovy new file mode 100644 index 00000000000..4076a076279 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortMethodLinesResolverTest.groovy @@ -0,0 +1,67 @@ +package datadog.trace.civisibility.source + + +import spock.lang.Specification + +class BestEffortMethodLinesResolverTest extends Specification { + + def "test get source info from delegate"() { + setup: + def testMethod = TestClass.getDeclaredMethod("testMethod") + def expectedLines = new MethodLinesResolver.MethodLines(42, 43) + + def delegate = Stub(MethodLinesResolver) + def secondDelegate = Stub(MethodLinesResolver) + def resolver = new BestEffortMethodLinesResolver(delegate, secondDelegate) + + delegate.getLines(testMethod) >> expectedLines + secondDelegate.getLines(testMethod) >> MethodLinesResolver.MethodLines.EMPTY + + when: + def lines = resolver.getLines(testMethod) + + then: + lines == expectedLines + } + + def "test get source info from second delegate"() { + setup: + def testMethod = TestClass.getDeclaredMethod("testMethod") + def expectedLines = new MethodLinesResolver.MethodLines(42, 43) + + def delegate = Stub(MethodLinesResolver) + def secondDelegate = Stub(MethodLinesResolver) + def resolver = new BestEffortMethodLinesResolver(delegate, secondDelegate) + + delegate.getLines(testMethod) >> MethodLinesResolver.MethodLines.EMPTY + secondDelegate.getLines(testMethod) >> expectedLines + + when: + def lines = resolver.getLines(testMethod) + + then: + lines == expectedLines + } + + def "test failed to get info from both delegates"() { + setup: + def testMethod = TestClass.getDeclaredMethod("testMethod") + + def delegate = Stub(MethodLinesResolver) + def secondDelegate = Stub(MethodLinesResolver) + def resolver = new BestEffortMethodLinesResolver(delegate, secondDelegate) + + delegate.getLines(testMethod) >> MethodLinesResolver.MethodLines.EMPTY + secondDelegate.getLines(testMethod) >> MethodLinesResolver.MethodLines.EMPTY + + when: + def lines = resolver.getLines(testMethod) + + then: + lines == MethodLinesResolver.MethodLines.EMPTY + } + + private static final class TestClass { + void testMethod() {} + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortSourcePathResolverTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortSourcePathResolverTest.groovy index 4020037ac00..23ae4fd48e5 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortSourcePathResolverTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/BestEffortSourcePathResolverTest.groovy @@ -1,6 +1,6 @@ package datadog.trace.civisibility.source -import datadog.trace.api.civisibility.source.SourcePathResolver + import spock.lang.Specification class BestEffortSourcePathResolverTest extends Specification { @@ 
-10,7 +10,7 @@ class BestEffortSourcePathResolverTest extends Specification { def expectedPath = "source/path/TestClass.java" def delegate = Stub(SourcePathResolver) def secondDelegate = Stub(SourcePathResolver) - def resolver = new BestEfforSourcePathResolver(delegate, secondDelegate) + def resolver = new BestEffortSourcePathResolver(delegate, secondDelegate) delegate.getSourcePath(TestClass) >> expectedPath secondDelegate.getSourcePath(TestClass) >> null @@ -27,7 +27,7 @@ class BestEffortSourcePathResolverTest extends Specification { def expectedPath = "source/path/TestClass.java" def delegate = Stub(SourcePathResolver) def secondDelegate = Stub(SourcePathResolver) - def resolver = new BestEfforSourcePathResolver(delegate, secondDelegate) + def resolver = new BestEffortSourcePathResolver(delegate, secondDelegate) delegate.getSourcePath(TestClass) >> null secondDelegate.getSourcePath(TestClass) >> expectedPath @@ -43,7 +43,7 @@ class BestEffortSourcePathResolverTest extends Specification { setup: def delegate = Stub(SourcePathResolver) def secondDelegate = Stub(SourcePathResolver) - def resolver = new BestEfforSourcePathResolver(delegate, secondDelegate) + def resolver = new BestEffortSourcePathResolver(delegate, secondDelegate) delegate.getSourcePath(TestClass) >> null secondDelegate.getSourcePath(TestClass) >> null diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/MethodLinesResolverImplTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/ByteCodeMethodLinesResolverTest.groovy similarity index 87% rename from dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/MethodLinesResolverImplTest.groovy rename to dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/ByteCodeMethodLinesResolverTest.groovy index 4e7c8862496..9efb2399ed4 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/MethodLinesResolverImplTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/ByteCodeMethodLinesResolverTest.groovy @@ -4,14 +4,14 @@ package datadog.trace.civisibility.source import org.spockframework.util.IoUtil import spock.lang.Specification -class MethodLinesResolverImplTest extends Specification { +class ByteCodeMethodLinesResolverTest extends Specification { def "test method lines resolution"() { setup: def aTestMethod = NestedClass.getDeclaredMethod("aTestMethod") when: - def methodLinesResolver = new MethodLinesResolverImpl() + def methodLinesResolver = new ByteCodeMethodLinesResolver() def methodLines = methodLinesResolver.getLines(aTestMethod) then: @@ -25,7 +25,7 @@ class MethodLinesResolverImplTest extends Specification { def aTestMethod = NestedClass.getDeclaredMethod("abstractMethod") when: - def methodLinesResolver = new MethodLinesResolverImpl() + def methodLinesResolver = new ByteCodeMethodLinesResolver() def methodLines = methodLinesResolver.getLines(aTestMethod) then: @@ -46,7 +46,7 @@ class MethodLinesResolverImplTest extends Specification { def misbehavingMethod = misbehavingClass.getDeclaredMethod("aTestMethod") when: - def methodLinesResolver = new MethodLinesResolverImpl() + def methodLinesResolver = new ByteCodeMethodLinesResolver() def methodLines = methodLinesResolver.getLines(misbehavingMethod) then: @@ -56,7 +56,7 @@ class MethodLinesResolverImplTest extends Specification { def "test returns empty method lines when unknown method is attempted 
to be resolved"() { setup: def aTestMethod = NestedClass.getDeclaredMethod("abstractMethod") - def classMethodLines = new MethodLinesResolverImpl.ClassMethodLines() + def classMethodLines = new ByteCodeMethodLinesResolver.ClassMethodLines() when: def methodLines = classMethodLines.get(aTestMethod) diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/CompilerAidedMethodLinesResolverTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/CompilerAidedMethodLinesResolverTest.groovy new file mode 100644 index 00000000000..390f3309447 --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/CompilerAidedMethodLinesResolverTest.groovy @@ -0,0 +1,40 @@ +package datadog.trace.civisibility.source + +import datadog.compiler.annotations.MethodLines +import spock.lang.Specification + +class CompilerAidedMethodLinesResolverTest extends Specification { + + def "test source info retrieval for #methodName"() { + setup: + def resolver = new CompilerAidedMethodLinesResolver() + def method = TestClass.getDeclaredMethod(methodName) + + when: + def lines = resolver.getLines(method) + + then: + lines.valid == expectedValid + + if (lines.valid) { + lines.startLineNumber == expectedStart + lines.finishLineNumber == expectedFinish + } + + where: + methodName | expectedValid | expectedStart | expectedFinish + "methodWithNoLinesInfoInjected" | false | -1 | -1 + "methodWithLinesInfoInjected" | true | 10 | 20 + "methodWithUnknownLinesInfoInjected" | false | -1 | -1 + } + + private static final class TestClass { + void methodWithNoLinesInfoInjected() {} + + @MethodLines(start = 10, end = 20) + void methodWithLinesInfoInjected() {} + + @MethodLines(start = -1, end = -1) + void methodWithUnknownLinesInfoInjected() {} + } +} diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/SourceRootResolverImplTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/PackageResolverImplTest.groovy similarity index 78% rename from dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/SourceRootResolverImplTest.groovy rename to dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/PackageResolverImplTest.groovy index a49d8cc4729..9dae5c64903 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/SourceRootResolverImplTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/PackageResolverImplTest.groovy @@ -6,9 +6,9 @@ import spock.lang.Specification import java.nio.file.Files -class SourceRootResolverImplTest extends Specification { +class PackageResolverImplTest extends Specification { - def "test source root resolution"() { + def "test source root resolution: #path"() { setup: def fileSystem = Jimfs.newFileSystem(Configuration.unix()) def javaFilePath = fileSystem.getPath(path) @@ -17,22 +17,22 @@ class SourceRootResolverImplTest extends Specification { Files.write(javaFilePath, contents.getBytes()) when: - def sourceRootResolver = new SourceRootResolverImpl(fileSystem) - def sourceRoot = sourceRootResolver.getSourceRoot(javaFilePath) + def packageResolver = new PackageResolverImpl(fileSystem) + def packagePath = packageResolver.getPackage(javaFilePath) then: - sourceRoot == fileSystem.getPath(expectedSourceRoot) + packagePath == 
fileSystem.getPath(expectedPackageName) where: - path | contents | expectedSourceRoot - "/root/src/MyClass.java" | CLASS_IN_DEFAULT_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | CLASS_IN_FOO_BAR_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | BLANK_LINES_BEFORE_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | SPACES_BEFORE_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | COMMENT_BEFORE_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | COMMENT_WITH_KEYWORD_BEFORE_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | MULTILINE_COMMENT_BEFORE_PACKAGE | "/root/src" - "/root/src/foo/bar/MyClass.java" | MULTILINE_COMMENT_WITH_KEYWORD_BEFORE_PACKAGE | "/root/src" + path | contents | expectedPackageName + "/root/src/MyClass.java" | CLASS_IN_DEFAULT_PACKAGE | "" + "/root/src/foo/bar/MyClass.java" | CLASS_IN_FOO_BAR_PACKAGE | "foo/bar" + "/root/src/foo/bar/MyClass.java" | BLANK_LINES_BEFORE_PACKAGE | "foo/bar" + "/root/src/foo/bar/MyClass.java" | SPACES_BEFORE_PACKAGE | "foo/bar" + "/root/src/foo/bar/MyClass.java" | COMMENT_BEFORE_PACKAGE | "foo/bar" + "/root/src/foo/bar/MyClass.java" | COMMENT_WITH_KEYWORD_BEFORE_PACKAGE | "foo/bar" + "/root/src/foo/bar/MyClass.java" | MULTILINE_COMMENT_BEFORE_PACKAGE | "foo/bar" + "/root/src/foo/bar/MyClass.java" | MULTILINE_COMMENT_WITH_KEYWORD_BEFORE_PACKAGE | "foo/bar" } private static final String CLASS_IN_DEFAULT_PACKAGE = diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolverTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolverTest.groovy index ad788bb8ca2..59a29662254 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolverTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexSourcePathResolverTest.groovy @@ -10,7 +10,7 @@ import java.nio.file.Path class RepoIndexSourcePathResolverTest extends Specification { - def sourceRootResolver = Stub(SourceRootResolver) + def packageResolver = Stub(PackageResolver) def fileSystem = Jimfs.newFileSystem(Configuration.unix()) def repoRoot = getRepoRoot() @@ -19,7 +19,7 @@ class RepoIndexSourcePathResolverTest extends Specification { def expectedSourcePath = givenSourceFile(RepoIndexSourcePathResolverTest, repoRoot + "/src") when: - def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem) + def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem) then: sourcePathResolver.getSourcePath(RepoIndexSourcePathResolverTest) == expectedSourcePath @@ -30,7 +30,7 @@ class RepoIndexSourcePathResolverTest extends Specification { def expectedSourcePath = givenSourceFile(RepoIndexSourcePathResolverTest, repoRoot + "/src") when: - def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem) + def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem) then: sourcePathResolver.getSourcePath(InnerClass) == expectedSourcePath @@ -41,7 +41,7 @@ class RepoIndexSourcePathResolverTest extends Specification { def expectedSourcePath = givenSourceFile(RepoIndexSourcePathResolverTest, repoRoot + "/src") when: - def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem) + def sourcePathResolver = new 
RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(InnerClass.NestedInnerClass) == expectedSourcePath
@@ -52,7 +52,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
     def expectedSourcePath = givenSourceFile(RepoIndexSourcePathResolverTest, repoRoot + "/src")
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
     def r = new Runnable() {
       void run() {}
     }
@@ -66,7 +66,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
     def expectedSourcePath = givenSourceFile(RepoIndexSourcePathResolverTest, repoRoot + "/src")
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(PackagePrivateClass) == expectedSourcePath
@@ -77,7 +77,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
     def expectedSourcePath = givenSourceFile(RepoIndexSourcePathResolverTest, repoRoot + "/src")
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(PublicClassWhoseNameDoesNotCorrespondToFileName) == expectedSourcePath
@@ -87,7 +87,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
     setup:
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(RepoIndexSourcePathResolver) == null
@@ -97,7 +97,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
     setup:
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(PackagePrivateClass) == null
@@ -106,13 +106,13 @@ class RepoIndexSourcePathResolverTest extends Specification {
   def "test file-indexing failure"() {
     setup:
     def classPath = fileSystem.getPath(generateSourceFileName(RepoIndexSourcePathResolverTest, repoRoot))
-    sourceRootResolver.getSourceRoot(classPath) >> { throw new IOException() }
+    packageResolver.getPackage(classPath) >> { throw new IOException() }
 
     Files.createDirectories(classPath.getParent())
     Files.write(classPath, "STUB CLASS BODY".getBytes())
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(RepoIndexSourcePathResolverTest) == null
@@ -126,7 +126,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
     givenRepoFile(fileSystem.getPath(repoRoot, "README.md"))
 
     when:
-    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, sourceRootResolver, fileSystem)
+    def sourcePathResolver = new RepoIndexSourcePathResolver(repoRoot, packageResolver, fileSystem)
 
     then:
     sourcePathResolver.getSourcePath(RepoIndexSourcePathResolverTest) == expectedSourcePathOne
@@ -136,7 +136,7 @@ class RepoIndexSourcePathResolverTest extends Specification {
   private String givenSourceFile(Class c, String sourceRoot, SourceType sourceType = SourceType.GROOVY) {
     def classPath = fileSystem.getPath(generateSourceFileName(c, sourceRoot, sourceType))
-    sourceRootResolver.getSourceRoot(classPath) >> fileSystem.getPath(sourceRoot)
+    packageResolver.getPackage(classPath) >> fileSystem.getPath(sourceRoot).relativize(classPath).getParent()
     givenRepoFile(classPath)
diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexTest.groovy
index 4d3cbe45a32..47dc4e42084 100644
--- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexTest.groovy
+++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/source/index/RepoIndexTest.groovy
@@ -12,7 +12,7 @@ class RepoIndexTest extends Specification {
     trieBuilder.put(RepoIndexSourcePathResolverTest.name + SourceType.GROOVY.extension, 1)
     def sourceRoots = Arrays.asList("myClassSourceRoot", "myOtherClassSourceRoot")
-    def repoIndex = new RepoIndex(trieBuilder.buildTrie(), sourceRoots)
+    def repoIndex = new RepoIndex(trieBuilder.buildTrie(), sourceRoots, Collections.emptyList())
 
     when:
     def serialized = repoIndex.serialize()
diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/CIUtilsTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/CIUtilsTest.groovy
deleted file mode 100644
index ecc48f7fafe..00000000000
--- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/CIUtilsTest.groovy
+++ /dev/null
@@ -1,42 +0,0 @@
-package datadog.trace.civisibility.utils
-
-
-import spock.lang.Specification
-
-import java.nio.file.Paths
-
-class CIUtilsTest extends Specification {
-
-  static workspace = resolve("ci/utils/workspace")
-  static innerWorkspace = resolve("ci/utils/workspace/innerworkspace")
-
-  def "test find path backwards "() {
-    when:
-    def result = CIUtils.findParentPathBackwards(path, target, isDirectory)
-
-    then:
-    result == expectedResult
-
-    where:
-    path | target | isDirectory | expectedResult
-    null | null | false | null
-    workspace | null | false | null
-    workspace | "" | false | null
-    workspace | "not-exists" | true | null
-    workspace | "targetFolder" | false | null
-    workspace | "targetFolder" | true | workspace
-    workspace | "targetFile.txt" | false | workspace
-    workspace | "targetFile.txt" | true | null
-    innerWorkspace | "targetFolder" | true | workspace
-    innerWorkspace | "targetFolder" | false | null
-    innerWorkspace | "targetFile.txt" | true |null
-    innerWorkspace | "targetFile.txt" | false | workspace
-    innerWorkspace | "otherTargetFolder" | true | workspace
-  }
-
-  static "resolve"(workspace) {
-    def resolvedWS = Paths.get(CIUtilsTest.getClassLoader().getResource(workspace).toURI())
-    println(resolvedWS.toString())
-    return resolvedWS
-  }
-}
diff --git a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/FileUtilsTest.groovy b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/FileUtilsTest.groovy
index 2af30de033c..baf8f01cb17 100644
--- a/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/FileUtilsTest.groovy
+++ b/dd-java-agent/agent-ci-visibility/src/test/groovy/datadog/trace/civisibility/utils/FileUtilsTest.groovy
@@ -5,6 +5,7 @@ import spock.lang.TempDir
 
 import java.nio.file.Files
 import java.nio.file.Path
+import java.nio.file.Paths
 
 class FileUtilsTest extends Specification {
@@ -23,4 +24,37 @@ class FileUtilsTest extends Specification {
     then:
     !Files.exists(temporaryFolder)
   }
+
+  static workspace = resolve("ci/utils/workspace")
+  static innerWorkspace = resolve("ci/utils/workspace/innerworkspace")
+
+  def "test find path backwards "() {
+    when:
+    def result = FileUtils.findParentPathBackwards(path, target, isDirectory)
+
+    then:
+    result == expectedResult
+
+    where:
+    path | target | isDirectory | expectedResult
+    null | null | false | null
+    workspace | null | false | null
+    workspace | "" | false | null
+    workspace | "not-exists" | true | null
+    workspace | "targetFolder" | false | null
+    workspace | "targetFolder" | true | workspace
+    workspace | "targetFile.txt" | false | workspace
+    workspace | "targetFile.txt" | true | null
+    innerWorkspace | "targetFolder" | true | workspace
+    innerWorkspace | "targetFolder" | false | null
+    innerWorkspace | "targetFile.txt" | true |null
+    innerWorkspace | "targetFile.txt" | false | workspace
+    innerWorkspace | "otherTargetFolder" | true | workspace
+  }
+
+  static "resolve"(workspace) {
+    def resolvedWS = Paths.get(FileUtilsTest.getClassLoader().getResource(workspace).toURI())
+    println(resolvedWS.toString())
+    return resolvedWS
+  }
 }
diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/appveyor.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/appveyor.json
index 356b75f318c..def208d7110 100644
--- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/appveyor.json
+++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/appveyor.json
@@ -6,7 +6,7 @@
       "APPVEYOR_BUILD_ID": "appveyor-build-id",
       "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number",
       "APPVEYOR_REPO_BRANCH": "master",
-      "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit",
+      "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123",
       "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name",
       "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com",
       "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message",
@@ -26,7 +26,7 @@
       "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com",
       "git.commit.author.name": "appveyor-commit-author-name",
       "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended",
-      "git.commit.sha": "appveyor-repo-commit",
+      "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123",
       "git.repository_url": "https://github.com/appveyor-repo-name.git"
     }
   ],
@@ -37,7 +37,7 @@
       "APPVEYOR_BUILD_ID": "appveyor-build-id",
       "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number",
       "APPVEYOR_REPO_BRANCH": "master",
-      "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit",
+      "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123",
       "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name",
       "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com",
       "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message",
@@ -57,7 +57,7 @@
       "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com",
       "git.commit.author.name": "appveyor-commit-author-name",
       "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended",
-      "git.commit.sha": "appveyor-repo-commit",
+      "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123",
       "git.repository_url": "https://github.com/appveyor-repo-name.git"
     }
   ],
@@ -68,7 +68,7 @@
       "APPVEYOR": "true",
       "APPVEYOR_BUILD_ID": "appveyor-build-id",
       "APPVEYOR_BUILD_NUMBER": 
"appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -88,7 +88,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -99,7 +99,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -119,7 +119,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -130,7 +130,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -152,7 +152,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -163,7 +163,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -183,7 +183,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -194,7 +194,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": 
"appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -216,7 +216,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -227,7 +227,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -254,7 +254,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "origin/master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -274,7 +274,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -285,7 +285,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "refs/heads/master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -305,7 +305,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -316,7 +316,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "refs/heads/feature/one", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ 
-336,7 +336,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -348,7 +348,7 @@ "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_PULL_REQUEST_HEAD_REPO_BRANCH": "origin/pr", "APPVEYOR_REPO_BRANCH": "origin/master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -368,7 +368,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -380,7 +380,7 @@ "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_PULL_REQUEST_HEAD_REPO_BRANCH": "refs/heads/pr", "APPVEYOR_REPO_BRANCH": "refs/heads/master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -400,7 +400,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git" } ], @@ -411,7 +411,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "origin/master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -432,7 +432,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git", "git.tag": "0.1.0" } @@ -444,7 +444,7 @@ "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", "APPVEYOR_REPO_BRANCH": "refs/heads/master", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": 
"appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -465,7 +465,7 @@ "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", - "git.commit.sha": "appveyor-repo-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/appveyor-repo-name.git", "git.tag": "0.1.0" } @@ -475,7 +475,7 @@ "APPVEYOR": "true", "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", @@ -516,7 +516,7 @@ "APPVEYOR": "true", "APPVEYOR_BUILD_ID": "appveyor-build-id", "APPVEYOR_BUILD_NUMBER": "appveyor-pipeline-number", - "APPVEYOR_REPO_COMMIT": "appveyor-repo-commit", + "APPVEYOR_REPO_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "APPVEYOR_REPO_COMMIT_AUTHOR": "appveyor-commit-author-name", "APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL": "appveyor-commit-author-email@datadoghq.com", "APPVEYOR_REPO_COMMIT_MESSAGE": "appveyor-commit-message", diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/awscodepipeline.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/awscodepipeline.json new file mode 100644 index 00000000000..6f3071331fe --- /dev/null +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/awscodepipeline.json @@ -0,0 +1,62 @@ +[ + [ + { + "CODEBUILD_BUILD_ARN": "arn:aws:codebuild:eu-north-1:12345678:build/codebuild-demo-project:b1e6661e-e4f2-4156-9ab9-82a19", + "CODEBUILD_INITIATOR": "codepipeline/test-pipeline", + "DD_ACTION_EXECUTION_ID": "35519dc3-7c45-493c-9ba6-cd78ea11f69d", + "DD_GIT_BRANCH": "user-supplied-branch", + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", + "DD_PIPELINE_EXECUTION_ID": "bb1f15ed-fde2-494d-8e13-88785bca9cc0" + }, + { + "_dd.ci.env_vars": "{\"CODEBUILD_BUILD_ARN\":\"arn:aws:codebuild:eu-north-1:12345678:build/codebuild-demo-project:b1e6661e-e4f2-4156-9ab9-82a19\",\"DD_PIPELINE_EXECUTION_ID\":\"bb1f15ed-fde2-494d-8e13-88785bca9cc0\",\"DD_ACTION_EXECUTION_ID\":\"35519dc3-7c45-493c-9ba6-cd78ea11f69d\"}", + "ci.pipeline.id": "bb1f15ed-fde2-494d-8e13-88785bca9cc0", + "ci.provider.name": "awscodepipeline", + "git.branch": "user-supplied-branch", + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + 
"git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "git@github.com:DataDog/userrepo.git" + } + ], + [ + { + "CODEBUILD_INITIATOR": "lambdafunction/test-lambda", + "DD_GIT_BRANCH": "user-supplied-branch", + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git" + }, + { + "git.branch": "user-supplied-branch", + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "git@github.com:DataDog/userrepo.git" + } + ] +] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/azurepipelines.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/azurepipelines.json index 6682b1a3b34..9262c329866 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/azurepipelines.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/azurepipelines.json @@ -3,418 +3,418 @@ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "master", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": 
"https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "master", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", - "SYSTEM_PULLREQUEST_SOURCEREPOSITORYURI": "sample2", + "SYSTEM_PULLREQUEST_SOURCEREPOSITORYURI": "https://azure-pipelines-server-uri.com/pull.git", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample2" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/pull.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "foo/bar", - 
"BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "/foo/bar~", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "HOME": "/not-my-home", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True", "USERPROFILE": "/not-my-home" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", 
"ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar~", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "/foo/~/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "HOME": "/not-my-home", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True", "USERPROFILE": "/not-my-home" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": 
"azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "~/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "HOME": "/not-my-home", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True", "USERPROFILE": "/not-my-home" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "~foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "HOME": "/not-my-home", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True", "USERPROFILE": "/not-my-home" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": 
"https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "~foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "~", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "HOME": "/not-my-home", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True", "USERPROFILE": "/not-my-home" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/not-my-home", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", 
"BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "refs/heads/master", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": 
"azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "refs/heads/feature/one", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", 
+ "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/tags/0.1.0", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git", "git.tag": "0.1.0" } ], @@ -422,33 +422,33 @@ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "refs/heads/tags/0.1.0", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - 
"ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git", "git.tag": "0.1.0" } ], @@ -456,65 +456,65 @@ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "origin/master", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_PULLREQUEST_SOURCEBRANCH": "origin/pr", - "SYSTEM_PULLREQUEST_SOURCECOMMITID": "commitPR", + "SYSTEM_PULLREQUEST_SOURCECOMMITID": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "pr", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": 
"azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commitPR", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "refs/heads/master", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_PULLREQUEST_SOURCEBRANCH": "refs/heads/pr", - "SYSTEM_PULLREQUEST_SOURCECOMMITID": "commitPR", + "SYSTEM_PULLREQUEST_SOURCECOMMITID": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "SYSTEM_STAGEDISPLAYNAME": "azure-pipelines-stage-name", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.stage.name": "azure-pipelines-stage-name", "ci.workspace_path": "/foo/bar", @@ -522,46 +522,46 @@ "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commitPR", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "sample", + "BUILD_REPOSITORY_URI": "https://azure-pipelines-server-uri.com/build.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEBRANCH": "refs/heads/feature/one", "BUILD_SOURCESDIRECTORY": "/foo/bar", - "BUILD_SOURCEVERSION": "commit", + "BUILD_SOURCEVERSION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", 
"SYSTEM_JOBDISPLAYNAME": "azure-pipelines-job-name", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_PULLREQUEST_SOURCEBRANCH": "refs/heads/pr", - "SYSTEM_PULLREQUEST_SOURCECOMMITID": "commitPR", + "SYSTEM_PULLREQUEST_SOURCECOMMITID": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", "ci.job.name": "azure-pipelines-job-name", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "ci.workspace_path": "/foo/bar", "git.branch": "pr", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.commit.sha": "commitPR", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://azure-pipelines-server-uri.com/build.git" } ], [ @@ -583,17 +583,17 @@ "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", @@ -626,17 +626,17 @@ 
"DD_GIT_TAG": "0.0.2", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", @@ -654,28 +654,28 @@ { "BUILD_BUILDID": "azure-pipelines-build-id", "BUILD_DEFINITIONNAME": "azure-pipelines-name", - "BUILD_REPOSITORY_URI": "https://user:password@dev.azure.com/fabrikamfiber/", + "BUILD_REPOSITORY_URI": "https://user:password@dev.azure.com/fabrikamfiber/repo.git", "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", - "SYSTEM_TEAMFOUNDATIONSERVERURI": "azure-pipelines-server-uri/", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", "TF_BUILD": "True" }, { "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", - "ci.job.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", "ci.pipeline.id": "azure-pipelines-build-id", "ci.pipeline.name": "azure-pipelines-name", "ci.pipeline.number": "azure-pipelines-build-id", - "ci.pipeline.url": "azure-pipelines-server-uri/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", "ci.provider.name": "azurepipelines", "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", "git.commit.author.name": "azure-pipelines-commit-author", "git.commit.message": "azure-pipelines-commit-message", - "git.repository_url": "https://dev.azure.com/fabrikamfiber/" + "git.repository_url": 
"https://dev.azure.com/fabrikamfiber/repo.git" } ] ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitbucket.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitbucket.json index c7171cf02e2..4b3c7e52c93 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitbucket.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitbucket.json @@ -4,8 +4,8 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -18,8 +18,31 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" + } + ], + [ + { + "BITBUCKET_BRANCH": "master", + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_CLONE_DIR": "foo/bar", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "ci.workspace_path": "foo/bar", + "git.branch": "master", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -27,8 +50,9 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", + "BITBUCKET_GIT_SSH_ORIGIN": "git@github.com:DataDog/dummy-example.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -41,8 +65,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "git@github.com:DataDog/dummy-example.git" } ], [ @@ -50,8 +74,8 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar~", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -64,8 +88,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar~", 
"git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -73,8 +97,8 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/~/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -87,8 +111,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -96,8 +120,8 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "~/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", "HOME": "/not-my-home", @@ -112,8 +136,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -121,8 +145,8 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "~foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -135,8 +159,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "~foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -144,8 +168,8 @@ "BITBUCKET_BRANCH": "master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "~", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", "HOME": "/not-my-home", @@ -160,8 +184,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/not-my-home", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -169,8 +193,8 @@ "BITBUCKET_BRANCH": "master", 
"BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -183,8 +207,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -192,8 +216,8 @@ "BITBUCKET_BRANCH": "origin/master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -206,8 +230,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -215,8 +239,8 @@ "BITBUCKET_BRANCH": "refs/heads/master", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -229,8 +253,8 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ @@ -238,8 +262,8 @@ "BITBUCKET_BRANCH": "refs/heads/feature/one", "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -252,16 +276,16 @@ "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git" } ], [ { "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", 
"BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", "BITBUCKET_TAG": "origin/tags/0.1.0" @@ -274,8 +298,8 @@ "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git", "git.tag": "0.1.0" } ], @@ -283,8 +307,8 @@ { "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", "BITBUCKET_CLONE_DIR": "/foo/bar", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", "BITBUCKET_TAG": "refs/heads/tags/0.1.0" @@ -297,16 +321,16 @@ "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", "ci.provider.name": "bitbucket", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "bitbucket-commit", - "git.repository_url": "bitbucket-repo-url", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket-repo-url.com/repo.git", "git.tag": "0.1.0" } ], [ { "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", "DD_GIT_BRANCH": "user-supplied-branch", @@ -342,8 +366,8 @@ [ { "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "bitbucket-repo-url", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket-repo-url.com/repo.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", @@ -379,8 +403,8 @@ [ { "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", - "BITBUCKET_COMMIT": "bitbucket-commit", - "BITBUCKET_GIT_SSH_ORIGIN": "https://user:password@bitbucket.org/DataDog/dogweb.git", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://user:password@bitbucket.org/DataDog/dogweb.git", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" }, @@ -391,7 +415,7 @@ "ci.pipeline.number": "bitbucket-build-num", "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", "ci.provider.name": "bitbucket", - "git.commit.sha": "bitbucket-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://bitbucket.org/DataDog/dogweb.git" } ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitrise.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitrise.json index a3ed2305ba9..5563094dc01 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitrise.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/bitrise.json @@ -3,137 +3,161 @@ { "BITRISE_BUILD_NUMBER": 
"bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", - "BITRISE_GIT_COMMIT": "gitcommit", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "gitcommit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_SOURCE_DIR": "/foo/bar", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "git@github.com:DataDog/dummy-example.git" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "ci.workspace_path": "/foo/bar", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "git@github.com:DataDog/dummy-example.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": 
"https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar~", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar~", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/~/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", 
+ "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "~/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git", "HOME": "/not-my-home", "USERPROFILE": "/not-my-home" }, @@ -141,26 +165,26 @@ "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "~foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git", "HOME": "/not-my-home", "USERPROFILE": "/not-my-home" }, @@ -168,26 +192,26 @@ "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "~foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "~", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git", "HOME": "/not-my-home", "USERPROFILE": "/not-my-home" }, @@ -195,88 +219,88 @@ "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/not-my-home", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "refs/heads/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "refs/heads/feature/one", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git" } ], [ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/tags/0.1.0", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_GIT_TAG": "origin/tags/0.1.0", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": 
"bitrise-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git", "git.tag": "0.1.0" } ], @@ -284,25 +308,25 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "refs/heads/tags/0.1.0", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_GIT_TAG": "refs/heads/tags/0.1.0", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", - "GIT_REPOSITORY_URL": "sample" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://bitrise-build-url.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitrise-build-url.com/repo.git", "git.tag": "0.1.0" } ], @@ -310,24 +334,24 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_REPOSITORY_URL": "http://hostname.com/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -335,24 +359,24 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/master", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_REPOSITORY_URL": "git@hostname.com:org/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "bitrise-git-commit-message", - 
"git.commit.sha": "bitrise-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "git@hostname.com:org/repo.git" } ], @@ -360,24 +384,24 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_BRANCH": "origin/notmaster", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_SOURCE_DIR": "/foo/bar", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_REPOSITORY_URL": "git@hostname.com:org/repo.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "ci.workspace_path": "/foo/bar", "git.branch": "notmaster", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "git@hostname.com:org/repo.git" } ], @@ -385,7 +409,7 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", "DD_GIT_BRANCH": "user-supplied-branch", @@ -398,13 +422,13 @@ "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", @@ -422,7 +446,7 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": "bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", @@ -435,13 +459,13 @@ "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", "DD_GIT_TAG": "0.0.2", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit" + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", @@ -459,20 +483,20 @@ { "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", - "BITRISE_BUILD_URL": 
"bitrise-build-url", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "GIT_CLONE_COMMIT_HASH": "bitrise-git-commit", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_REPOSITORY_URL": "https://user:password@github.com/DataDog/dogweb.git" }, { "ci.pipeline.id": "bitrise-pipeline-id", "ci.pipeline.name": "bitrise-pipeline-name", "ci.pipeline.number": "bitrise-pipeline-number", - "ci.pipeline.url": "bitrise-build-url", + "ci.pipeline.url": "https://bitrise-build-url.com//", "ci.provider.name": "bitrise", "git.commit.message": "bitrise-git-commit-message", - "git.commit.sha": "bitrise-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buddy.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buddy.json index 57f34d10a8b..26bf616455b 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buddy.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buddy.json @@ -4,7 +4,7 @@ "BUDDY": "true", "BUDDY_EXECUTION_BRANCH": "master", "BUDDY_EXECUTION_ID": "buddy-execution-id", - "BUDDY_EXECUTION_REVISION": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", @@ -12,7 +12,7 @@ "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", - "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project" + "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project.git" }, { "ci.pipeline.id": "456/buddy-execution-id", @@ -24,8 +24,8 @@ "git.commit.committer.email": "mikebenson@buddy.works", "git.commit.committer.name": "Mike Benson", "git.commit.message": "Create buddy.yml", - "git.commit.sha": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", - "git.repository_url": "https://github.com/buddyworks/my-project", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/buddyworks/my-project.git", "git.tag": "v1.0" } ], @@ -34,7 +34,7 @@ "BUDDY": "true", "BUDDY_EXECUTION_BRANCH": "my-name-is-rotag/fix-original-bug", "BUDDY_EXECUTION_ID": "buddy-execution-id", - "BUDDY_EXECUTION_REVISION": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", @@ -42,7 +42,7 @@ "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", - "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project" + "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project.git" }, { "ci.pipeline.id": "456/buddy-execution-id", @@ -54,8 +54,8 @@ "git.commit.committer.email": "mikebenson@buddy.works", "git.commit.committer.name": "Mike Benson", "git.commit.message": "Create buddy.yml", - 
"git.commit.sha": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", - "git.repository_url": "https://github.com/buddyworks/my-project", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/buddyworks/my-project.git", "git.tag": "v1.0" } ], @@ -64,7 +64,7 @@ "BUDDY": "true", "BUDDY_EXECUTION_BRANCH": "refs/heads/feature/one", "BUDDY_EXECUTION_ID": "buddy-execution-id", - "BUDDY_EXECUTION_REVISION": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", @@ -72,7 +72,7 @@ "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", - "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project" + "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project.git" }, { "ci.pipeline.id": "456/buddy-execution-id", @@ -84,8 +84,8 @@ "git.commit.committer.email": "mikebenson@buddy.works", "git.commit.committer.name": "Mike Benson", "git.commit.message": "Create buddy.yml", - "git.commit.sha": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", - "git.repository_url": "https://github.com/buddyworks/my-project", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/buddyworks/my-project.git", "git.tag": "0.2.0" } ], @@ -94,7 +94,7 @@ "BUDDY": "true", "BUDDY_EXECUTION_BRANCH": "master", "BUDDY_EXECUTION_ID": "buddy-execution-id", - "BUDDY_EXECUTION_REVISION": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", @@ -102,7 +102,7 @@ "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", - "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project", + "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project.git", "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", @@ -139,7 +139,7 @@ "BUDDY": "true", "BUDDY_EXECUTION_BRANCH": "master", "BUDDY_EXECUTION_ID": "buddy-execution-id", - "BUDDY_EXECUTION_REVISION": "e5e13f8b7f8d5c6096a0501dc09b48eef05fea96", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", @@ -147,7 +147,7 @@ "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", - "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project", + "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project.git", "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", diff --git 
a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buildkite.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buildkite.json index f70b0590916..c332fd740d7 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buildkite.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/buildkite.json @@ -8,8 +8,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -18,18 +18,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -42,8 +42,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -52,18 +52,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -76,8 +76,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar~", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": 
"buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -86,18 +86,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar~", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -110,8 +110,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/~/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -120,18 +120,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -144,8 +144,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "~/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -156,18 +156,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", 
"ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -180,8 +180,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "~foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -192,18 +192,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "~foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -216,8 +216,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "~", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -228,18 +228,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/not-my-home", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": 
"buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -252,8 +252,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -262,18 +262,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -286,8 +286,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -296,18 +296,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -320,8 +320,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", 
"BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -330,18 +330,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -354,8 +354,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -364,18 +364,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "git@hostname.com:org/repo.git" } ], @@ -388,8 +388,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -398,18 +398,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + 
"ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -422,8 +422,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -432,18 +432,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -456,8 +456,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -466,18 +466,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -490,8 +490,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": 
"/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -500,17 +500,17 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git", "git.tag": "0.1.0" } @@ -524,8 +524,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -534,17 +534,17 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git", "git.tag": "0.1.0" } @@ -558,8 +558,8 @@ "BUILDKITE_BUILD_CHECKOUT_PATH": "/foo/bar", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -568,17 +568,17 @@ }, { "_dd.ci.env_vars": 
"{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "ci.workspace_path": "/foo/bar", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git", "git.tag": "0.1.0" } @@ -591,8 +591,8 @@ "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -610,11 +610,11 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", @@ -636,8 +636,8 @@ "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -655,11 +655,11 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", @@ -681,8 +681,8 @@ "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - 
"BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -691,16 +691,16 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } ], @@ -715,8 +715,8 @@ "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", - "BUILDKITE_BUILD_URL": "buildkite-build-url", - "BUILDKITE_COMMIT": "buildkite-git-commit", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "BUILDKITE_JOB_ID": "buildkite-job-id", "BUILDKITE_MESSAGE": "buildkite-git-commit-message", "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", @@ -724,18 +724,18 @@ }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", - "ci.job.url": "buildkite-build-url#buildkite-job-id", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", "ci.node.labels": "[\"mytag:my-value\",\"myothertag:my-other-value\"]", "ci.node.name": "1a222222-e999-3636-8ddd-802222222222", "ci.pipeline.id": "buildkite-pipeline-id", "ci.pipeline.name": "buildkite-pipeline-name", "ci.pipeline.number": "buildkite-pipeline-number", - "ci.pipeline.url": "buildkite-build-url", + "ci.pipeline.url": "https://buildkite-build-url.com", "ci.provider.name": "buildkite", "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", "git.commit.author.name": "buildkite-git-commit-author-name", "git.commit.message": "buildkite-git-commit-message", - "git.commit.sha": "buildkite-git-commit" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123" } ] ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/circleci.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/circleci.json index 5364d69b6ca..8efa8c353f0 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/circleci.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/circleci.json @@ -4,26 +4,26 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", 
+ "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -31,26 +31,26 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -58,26 +58,26 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar~" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar~", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + 
"git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -85,26 +85,26 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/~/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -112,11 +112,11 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "~/foo/bar", "HOME": "/not-my-home", @@ -125,15 +125,15 @@ { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -141,11 +141,11 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "~foo/bar", "HOME": "/not-my-home", @@ -154,15 +154,15 @@ { "_dd.ci.env_vars": 
"{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "~foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -170,11 +170,11 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "~", "HOME": "/not-my-home", @@ -183,15 +183,15 @@ { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/not-my-home", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -199,26 +199,26 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "refs/heads/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -226,26 +226,26 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "refs/heads/feature/one", "CIRCLE_BUILD_NUM": 
"circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git" } ], [ @@ -253,11 +253,11 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/tags/0.1.0", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_TAG": "origin/tags/0.1.0", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" @@ -265,14 +265,14 @@ { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git", "git.tag": "0.1.0" } ], @@ -281,11 +281,11 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "refs/heads/tags/0.1.0", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_REPOSITORY_URL": "sample", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_REPOSITORY_URL": "https://circleci-build-url.com/repo.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_TAG": "refs/heads/tags/0.1.0", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" @@ -293,14 +293,14 @@ { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": 
"https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "circleci-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://circleci-build-url.com/repo.git", "git.tag": "0.1.0" } ], @@ -309,25 +309,25 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", "CIRCLE_REPOSITORY_URL": "http://hostname.com/repo.git", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -336,25 +336,25 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", "CIRCLE_REPOSITORY_URL": "http://user@hostname.com/repo.git", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -363,25 +363,25 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", "CIRCLE_REPOSITORY_URL": "http://user%E2%82%AC@hostname.com/repo.git", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", 
"CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -390,25 +390,25 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", "CIRCLE_REPOSITORY_URL": "http://user:pwd@hostname.com/repo.git", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -417,25 +417,25 @@ "CIRCLECI": "circleCI", "CIRCLE_BRANCH": "origin/master", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", "CIRCLE_REPOSITORY_URL": "git@hostname.com:org/repo.git", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "CIRCLE_WORKING_DIRECTORY": "/foo/bar" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "circleci-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "git@hostname.com:org/repo.git" } ], @@ -443,10 +443,10 @@ { "CIRCLECI": "circleCI", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_SHA1": "circleci-git-commit", + 
"CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", @@ -462,7 +462,7 @@ { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", @@ -483,10 +483,10 @@ { "CIRCLECI": "circleCI", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", @@ -502,7 +502,7 @@ { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", @@ -523,22 +523,22 @@ { "CIRCLECI": "circleCI", "CIRCLE_BUILD_NUM": "circleci-pipeline-number", - "CIRCLE_BUILD_URL": "circleci-build-url", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", "CIRCLE_JOB": "circleci-job-name", "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", "CIRCLE_REPOSITORY_URL": "https://user:password@github.com/DataDog/dogweb.git", - "CIRCLE_SHA1": "circleci-git-commit", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", "ci.job.name": "circleci-job-name", - "ci.job.url": "circleci-build-url", + "ci.job.url": "https://circleci-build-url.com/", "ci.pipeline.id": "circleci-pipeline-id", "ci.pipeline.name": "circleci-pipeline-name", "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", "ci.provider.name": "circleci", - "git.commit.sha": "circleci-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/github.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/github.json index 918ff12a5ef..e5df52c58ba 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/github.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/github.json @@ -8,14 +8,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://ghenterprise.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": 
"{\"GITHUB_SERVER_URL\":\"https://ghenterprise.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://ghenterprise.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://ghenterprise.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -23,7 +23,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://ghenterprise.com/ghactions-repo.git" } ], @@ -37,14 +37,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -52,7 +52,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -66,14 +66,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -81,7 +81,7 @@ "ci.provider.name": "github", "ci.workspace_path": "foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -95,14 +95,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar~" }, { "_dd.ci.env_vars": 
"{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -110,7 +110,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar~", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -124,14 +124,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/~/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -139,7 +139,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -153,7 +153,7 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "~/foo/bar", "HOME": "/not-my-home", @@ -162,7 +162,7 @@ { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -170,7 +170,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -184,7 +184,7 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", 
"GITHUB_WORKSPACE": "~foo/bar", "HOME": "/not-my-home", @@ -193,7 +193,7 @@ { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -201,7 +201,7 @@ "ci.provider.name": "github", "ci.workspace_path": "~foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -215,7 +215,7 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "~", "HOME": "/not-my-home", @@ -224,7 +224,7 @@ { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -232,7 +232,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/not-my-home", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -246,14 +246,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -261,7 +261,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -275,14 +275,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -290,7 +290,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -304,14 +304,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -319,7 +319,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -333,21 +333,21 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", "ci.pipeline.url": "https://github.com/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git", "git.tag": "0.1.0" } @@ -362,21 +362,21 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", 
"GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", "ci.pipeline.url": "https://github.com/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git", "git.tag": "0.1.0" } @@ -392,14 +392,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -407,7 +407,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "other", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -422,14 +422,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -437,7 +437,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "other", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -452,14 +452,14 @@ 
"GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -467,7 +467,7 @@ "ci.provider.name": "github", "ci.workspace_path": "/foo/bar", "git.branch": "feature/other", - "git.commit.sha": "ghactions-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/ghactions-repo.git" } ], @@ -490,14 +490,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", @@ -535,14 +535,14 @@ "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://github.com", - "GITHUB_SHA": "ghactions-commit", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GITHUB_WORKFLOW": "ghactions-pipeline-name", "GITHUB_WORKSPACE": "foo/bar" }, { "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", - "ci.job.url": "https://github.com/ghactions-repo/commit/ghactions-commit/checks", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/gitlab.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/gitlab.json index 2d16d31225b..c1879ed80bd 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/gitlab.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/gitlab.json @@ -4,63 +4,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", - "CI_PIPELINE_ID": "gitlab-pipeline-id", - "CI_PIPELINE_IID": "gitlab-pipeline-number", - "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", - "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", - "GITLAB_CI": "gitlab" - }, - { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", - "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", - "ci.pipeline.id": "gitlab-pipeline-id", - "ci.pipeline.name": "gitlab-pipeline-name", - "ci.pipeline.number": "gitlab-pipeline-number", - "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", - "ci.provider.name": "gitlab", - "ci.stage.name": "gitlab-stage-name", - "git.branch": "master", - "git.commit.author.date": "2021-07-21T11:43:07-04:00", - "git.commit.author.email": "john@doe.com", - "git.commit.author.name": "John Doe", - "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" - } - ], - [ - { - "CI_COMMIT_AUTHOR": "John Doe ", - "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", - "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", - "CI_JOB_ID": "gitlab-job-id", - "CI_JOB_NAME": "gitlab-job-name", - "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -73,8 +35,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -82,25 +44,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", 
"CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar~", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -113,8 +75,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -122,25 +84,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/~/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -153,8 +115,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -162,27 +124,27 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", 
"CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "~/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab", "HOME": "/not-my-home", "USERPROFILE": "/not-my-home" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -195,8 +157,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -204,27 +166,27 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "~foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab", "HOME": "/not-my-home", "USERPROFILE": "/not-my-home" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -237,8 +199,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -246,27 +208,27 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", 
"CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "~", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab", "HOME": "/not-my-home", "USERPROFILE": "/not-my-home" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -279,86 +241,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" - } - ], - [ - { - "CI_COMMIT_AUTHOR": "John Doe ", - "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", - "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", - "CI_JOB_ID": "gitlab-job-id", - "CI_JOB_NAME": "gitlab-job-name", - "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", - "CI_PIPELINE_ID": "gitlab-pipeline-id", - "CI_PIPELINE_IID": "gitlab-pipeline-number", - "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", - "CI_PROJECT_DIR": "/foo/bar", - "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", - "GITLAB_CI": "gitlab" - }, - { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", - "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", - "ci.pipeline.id": "gitlab-pipeline-id", - "ci.pipeline.name": "gitlab-pipeline-name", - "ci.pipeline.number": "gitlab-pipeline-number", - "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", - "ci.provider.name": "gitlab", - "ci.stage.name": "gitlab-stage-name", - "ci.workspace_path": "/foo/bar", - "git.branch": "master", - "git.commit.author.date": "2021-07-21T11:43:07-04:00", - "git.commit.author.email": "john@doe.com", - "git.commit.author.name": "John Doe", - "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" - } - ], - [ - { - "CI_COMMIT_AUTHOR": "John Doe ", - "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", - "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", - "CI_JOB_ID": "gitlab-job-id", - "CI_JOB_NAME": "gitlab-job-name", - "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", - "CI_PIPELINE_ID": "gitlab-pipeline-id", - "CI_PIPELINE_IID": "gitlab-pipeline-number", - "CI_PROJECT_DIR": "/foo/bar", - "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", - "GITLAB_CI": "gitlab" - }, - { - 
"_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", - "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", - "ci.pipeline.id": "gitlab-pipeline-id", - "ci.pipeline.name": "gitlab-pipeline-name", - "ci.pipeline.number": "gitlab-pipeline-number", - "ci.provider.name": "gitlab", - "ci.stage.name": "gitlab-stage-name", - "ci.workspace_path": "/foo/bar", - "git.branch": "master", - "git.commit.author.date": "2021-07-21T11:43:07-04:00", - "git.commit.author.email": "john@doe.com", - "git.commit.author.name": "John Doe", - "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -366,25 +250,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "refs/heads/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -397,8 +281,8 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ @@ -406,25 +290,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "refs/heads/feature/one", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": 
"https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -437,34 +321,35 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ], [ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TAG": "origin/tags/0.1.0", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -472,12 +357,13 @@ "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", "ci.workspace_path": "/foo/bar", + "git.branch": "master", "git.commit.author.date": "2021-07-21T11:43:07-04:00", "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git", "git.tag": "0.1.0" } ], @@ -485,26 +371,27 @@ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TAG": "refs/heads/tags/0.1.0", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": 
"gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -512,12 +399,13 @@ "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", "ci.workspace_path": "/foo/bar", + "git.branch": "master", "git.commit.author.date": "2021-07-21T11:43:07-04:00", "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git", "git.tag": "0.1.0" } ], @@ -525,26 +413,27 @@ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TAG": "0.1.0", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", - "CI_REPOSITORY_URL": "sample", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -552,12 +441,13 @@ "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", "ci.workspace_path": "/foo/bar", + "git.branch": "master", "git.commit.author.date": "2021-07-21T11:43:07-04:00", "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git", "git.tag": "0.1.0" } ], @@ -566,25 +456,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": 
"gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "http://hostname.com/repo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -597,7 +487,7 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -606,25 +496,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "http://user@hostname.com/repo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -637,7 +527,7 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -646,25 +536,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "http://user%E2%82%AC@hostname.com/repo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -677,7 +567,7 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -686,25 +576,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "http://user:pwd@hostname.com/repo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -717,7 +607,7 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -726,25 +616,25 @@ "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", "CI_COMMIT_REF_NAME": "origin/master", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": 
"gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "git@hostname.com:org/repo.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", @@ -757,7 +647,7 @@ "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "git@hostname.com:org/repo.git" } ], @@ -765,16 +655,20 @@ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", @@ -788,14 +682,16 @@ "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", @@ -812,16 +708,20 @@ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", 
"CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", @@ -835,14 +735,17 @@ "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", "git.commit.author.name": "usersupplied-authorname", @@ -859,33 +762,39 @@ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "https://user:password@gitlab.com/DataDog/dogweb.git", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", "git.commit.author.date": "2021-07-21T11:43:07-04:00", "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit", + "git.commit.sha": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://gitlab.com/DataDog/dogweb.git" } ], @@ -893,36 +802,44 @@ { "CI_COMMIT_AUTHOR": "John Doe ", "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", - "CI_COMMIT_SHA": "gitlab-git-commit", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", "CI_JOB_ID": "gitlab-job-id", "CI_JOB_NAME": "gitlab-job-name", "CI_JOB_STAGE": "gitlab-stage-name", - "CI_JOB_URL": "gitlab-job-url", + "CI_JOB_URL": "https://gitlab.com/job", "CI_PIPELINE_ID": "gitlab-pipeline-id", "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", - "CI_PROJECT_URL": "gitlab-project-url", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", "CI_RUNNER_ID": "9393040", "CI_RUNNER_TAGS": "[\"arch:arm64\",\"linux\"]", "GITLAB_CI": "gitlab" }, { - "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"gitlab-project-url\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", "ci.job.name": "gitlab-job-name", - "ci.job.url": "gitlab-job-url", + "ci.job.url": "https://gitlab.com/job", "ci.node.labels": "[\"arch:arm64\",\"linux\"]", "ci.node.name": "9393040", "ci.pipeline.id": "gitlab-pipeline-id", "ci.pipeline.name": "gitlab-pipeline-name", "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", "git.commit.author.date": "2021-07-21T11:43:07-04:00", "git.commit.author.email": "john@doe.com", "git.commit.author.name": "John Doe", "git.commit.message": "gitlab-git-commit-message", - "git.commit.sha": "gitlab-git-commit" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://gitlab.com/repo/myrepo.git" } ] ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/jenkins.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/jenkins.json index b9ff0a3442d..3e791b5f7ff 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/jenkins.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/jenkins.json @@ -3,90 +3,90 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL_1": "sample", - "GIT_URL_2": "otherSample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://jenkins.com/repo/sample.git", + "GIT_URL_2": "https://jenkins.com/repo/otherSample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url" + "JOB_URL": "https://jenkins.com/job" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "git.branch": 
"master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url" + "JOB_URL": "https://jenkins.com/job" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url" + "JOB_URL": "https://jenkins.com/job" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "foo/bar" }, { @@ -94,26 +94,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": 
"jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar~" }, { @@ -121,26 +121,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar~", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/~/bar" }, { @@ -148,27 +148,27 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "HOME": "/not-my-home", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "USERPROFILE": "/not-my-home", "WORKSPACE": "~/foo/bar" }, @@ -177,27 +177,27 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", 
"DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "HOME": "/not-my-home", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "USERPROFILE": "/not-my-home", "WORKSPACE": "~foo/bar" }, @@ -206,27 +206,27 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "~foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "HOME": "/not-my-home", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "USERPROFILE": "/not-my-home", "WORKSPACE": "~" }, @@ -235,26 +235,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/not-my-home", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -262,26 +262,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "refs/heads/master", - "GIT_COMMIT": "jenkins-git-commit", - 
"GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName/master", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -289,26 +289,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "refs/heads/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName/another", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -316,26 +316,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName/another", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "refs/heads/feature/one", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName/feature/one", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -343,26 +343,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "refs/heads/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": 
"jobName/KEY1=VALUE1,KEY2=VALUE2", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -370,26 +370,26 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "refs/heads/master", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName/KEY1=VALUE1,KEY2=VALUE2/master", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -397,36 +397,36 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git" } ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/tags/0.1.0", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git", "git.tag": "0.1.0" } ], @@ -434,24 +434,24 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "refs/heads/tags/0.1.0", - "GIT_COMMIT": "jenkins-git-commit", - "GIT_URL": "sample", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL": "https://jenkins.com/repo/sample.git", "JENKINS_URL": "jenkins", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { "_dd.ci.env_vars": 
"{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", - "git.commit.sha": "jenkins-git-commit", - "git.repository_url": "sample", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://jenkins.com/repo/sample.git", "git.tag": "0.1.0" } ], @@ -459,14 +459,14 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL": "http://hostname.com/repo.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -474,11 +474,11 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -486,14 +486,14 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL": "http://user@hostname.com/repo.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -501,11 +501,11 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -513,14 +513,14 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL": "http://user%E2%82%AC@hostname.com/repo.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -528,11 +528,11 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - 
"git.commit.sha": "jenkins-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -540,14 +540,14 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL": "http://user:pwd@hostname.com/repo.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -555,11 +555,11 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "http://hostname.com/repo.git" } ], @@ -567,14 +567,14 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "GIT_BRANCH": "origin/master", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL": "git@hostname.com:org/repo.git", "JENKINS_URL": "jenkins", "JOB_NAME": "jobName", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "WORKSPACE": "/foo/bar" }, { @@ -582,11 +582,11 @@ "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.name": "jobName", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.sha": "jenkins-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "git@hostname.com:org/repo.git" } ], @@ -594,7 +594,7 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", @@ -606,15 +606,15 @@ "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "JENKINS_URL": "jenkins", - "JOB_URL": "jenkins-job-url" + "JOB_URL": "https://jenkins.com/job" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", @@ -632,7 +632,7 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": 
"https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", @@ -644,15 +644,15 @@ "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", "DD_GIT_TAG": "0.0.2", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "JENKINS_URL": "jenkins", - "JOB_URL": "jenkins-job-url" + "JOB_URL": "https://jenkins.com/job" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", @@ -670,20 +670,20 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL_1": "https://user:password@github.com/DataDog/dogweb.git", "JENKINS_URL": "jenkins", - "JOB_URL": "jenkins-job-url" + "JOB_URL": "https://jenkins.com/job" }, { "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", - "git.commit.sha": "jenkins-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } ], @@ -691,11 +691,11 @@ { "BUILD_NUMBER": "jenkins-pipeline-number", "BUILD_TAG": "jenkins-pipeline-id", - "BUILD_URL": "jenkins-pipeline-url", + "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", - "GIT_COMMIT": "jenkins-git-commit", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "JENKINS_URL": "jenkins", - "JOB_URL": "jenkins-job-url", + "JOB_URL": "https://jenkins.com/job", "NODE_LABELS": "built-in linux", "NODE_NAME": "my-node" }, @@ -705,9 +705,9 @@ "ci.node.name": "my-node", "ci.pipeline.id": "jenkins-pipeline-id", "ci.pipeline.number": "jenkins-pipeline-number", - "ci.pipeline.url": "jenkins-pipeline-url", + "ci.pipeline.url": "https://jenkins.com/pipeline", "ci.provider.name": "jenkins", - "git.commit.sha": "jenkins-git-commit" + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123" } ] ] diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/teamcity.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/teamcity.json index 29191aa8e63..086c1c16de1 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/teamcity.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/teamcity.json @@ -1,19 +1,19 @@ [ [ { - "BUILD_URL": "the-build-url", + "BUILD_URL": "https://teamcity.com/repo", "TEAMCITY_BUILDCONF_NAME": "Test 1", "TEAMCITY_VERSION": "2022.10 (build 116751)" }, { "ci.job.name": "Test 1", - "ci.job.url": "the-build-url", + "ci.job.url": "https://teamcity.com/repo", "ci.provider.name": "teamcity" } ], [ { - "BUILD_URL": "the-build-url", + "BUILD_URL": 
"https://teamcity.com/repo", "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", @@ -29,7 +29,7 @@ }, { "ci.job.name": "Test 1", - "ci.job.url": "the-build-url", + "ci.job.url": "https://teamcity.com/repo", "ci.provider.name": "teamcity", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", @@ -45,7 +45,7 @@ ], [ { - "BUILD_URL": "the-build-url", + "BUILD_URL": "https://teamcity.com/repo", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", @@ -61,7 +61,7 @@ }, { "ci.job.name": "Test 1", - "ci.job.url": "the-build-url", + "ci.job.url": "https://teamcity.com/repo", "ci.provider.name": "teamcity", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/travisci.json b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/travisci.json index a73309d2678..2131938546f 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/travisci.json +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/travisci.json @@ -6,23 +6,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo", "TRAVIS_TAG": "origin/tags/0.1.0" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git", "git.tag": "0.1.0" } @@ -34,23 +34,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo", "TRAVIS_TAG": "refs/heads/tags/0.1.0" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git", "git.tag": "0.1.0" } @@ -61,22 +61,22 @@ "TRAVIS_BRANCH": "origin/master", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -87,23 +87,23 @@ "TRAVIS_BUILD_DIR": "foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -114,23 +114,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar~", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar~", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -141,23 +141,23 @@ "TRAVIS_BUILD_DIR": "/foo/~/bar", 
"TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/~/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -169,24 +169,24 @@ "TRAVIS_BUILD_DIR": "~/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo", "USERPROFILE": "/not-my-home" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/not-my-home/foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -197,23 +197,23 @@ "TRAVIS_BUILD_DIR": "~foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "~foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -225,24 +225,24 @@ "TRAVIS_BUILD_DIR": "~", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - 
"TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo", "USERPROFILE": "/not-my-home" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/not-my-home", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -253,23 +253,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -280,23 +280,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -307,23 +307,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + 
"TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -334,25 +334,25 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_PULL_REQUEST_BRANCH": "origin/master", "TRAVIS_PULL_REQUEST_SLUG": "user/repo", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -363,25 +363,25 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_PULL_REQUEST_BRANCH": "refs/heads/master", "TRAVIS_PULL_REQUEST_SLUG": "user/repo", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.branch": "master", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -392,25 +392,25 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - 
"TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_PULL_REQUEST_BRANCH": "refs/heads/feature/one", "TRAVIS_PULL_REQUEST_SLUG": "user/repo", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.branch": "feature/one", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git" } ], @@ -429,18 +429,18 @@ "TRAVIS": "travisCI", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", @@ -469,18 +469,18 @@ "TRAVIS": "travisCI", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo" }, { - "ci.job.url": "travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", @@ -501,23 +501,23 @@ "TRAVIS_BUILD_DIR": "/foo/bar", "TRAVIS_BUILD_ID": "travis-pipeline-id", "TRAVIS_BUILD_NUMBER": "travis-pipeline-number", - "TRAVIS_BUILD_WEB_URL": "travis-pipeline-url", - "TRAVIS_COMMIT": "travis-git-commit", + "TRAVIS_BUILD_WEB_URL": "https://travisci.com/pipeline", + "TRAVIS_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "TRAVIS_COMMIT_MESSAGE": "travis-commit-message", - "TRAVIS_JOB_WEB_URL": "travis-job-url", + "TRAVIS_JOB_WEB_URL": "https://travisci.com/job", "TRAVIS_REPO_SLUG": "user/repo", "TRAVIS_TAG": "origin/tags/0.1.0" }, { - "ci.job.url": 
"travis-job-url", + "ci.job.url": "https://travisci.com/job", "ci.pipeline.id": "travis-pipeline-id", "ci.pipeline.name": "user/repo", "ci.pipeline.number": "travis-pipeline-number", - "ci.pipeline.url": "travis-pipeline-url", + "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "ci.workspace_path": "/foo/bar", "git.commit.message": "travis-commit-message", - "git.commit.sha": "travis-git-commit", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/user/repo.git", "git.tag": "0.1.0" } diff --git a/dd-java-agent/agent-ci-visibility/src/testFixtures/groovy/datadog/trace/civisibility/CiVisibilityTest.groovy b/dd-java-agent/agent-ci-visibility/src/testFixtures/groovy/datadog/trace/civisibility/CiVisibilityTest.groovy index 8ea5cf4c0bf..cd818861261 100644 --- a/dd-java-agent/agent-ci-visibility/src/testFixtures/groovy/datadog/trace/civisibility/CiVisibilityTest.groovy +++ b/dd-java-agent/agent-ci-visibility/src/testFixtures/groovy/datadog/trace/civisibility/CiVisibilityTest.groovy @@ -2,26 +2,26 @@ package datadog.trace.civisibility import datadog.trace.agent.test.AgentTestRunner import datadog.trace.agent.test.asserts.TraceAssert -import datadog.trace.civisibility.coverage.NoopCoverageProbeStore import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.civisibility.InstrumentationBridge import datadog.trace.api.civisibility.config.ModuleExecutionSettings import datadog.trace.api.civisibility.config.SkippableTest -import datadog.trace.api.civisibility.source.SourcePathResolver import datadog.trace.api.config.CiVisibilityConfig import datadog.trace.api.config.GeneralConfig import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.civisibility.codeowners.Codeowners import datadog.trace.civisibility.config.JvmInfoFactory import datadog.trace.civisibility.config.ModuleExecutionSettingsFactory +import datadog.trace.civisibility.coverage.NoopCoverageProbeStore import datadog.trace.civisibility.decorator.TestDecorator import datadog.trace.civisibility.decorator.TestDecoratorImpl import datadog.trace.civisibility.events.BuildEventsHandlerImpl import datadog.trace.civisibility.events.TestEventsHandlerImpl import datadog.trace.civisibility.ipc.SignalServer import datadog.trace.civisibility.source.MethodLinesResolver +import datadog.trace.civisibility.source.SourcePathResolver import datadog.trace.civisibility.source.index.RepoIndexBuilder import datadog.trace.core.DDSpan import datadog.trace.util.Strings @@ -30,6 +30,7 @@ import spock.lang.Unroll import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths +import java.util.regex.Pattern @Unroll abstract class CiVisibilityTest extends AgentTestRunner { @@ -43,10 +44,12 @@ abstract class CiVisibilityTest extends AgentTestRunner { static final int DUMMY_TEST_METHOD_START = 12 static final int DUMMY_TEST_METHOD_END = 18 static final Collection DUMMY_CODE_OWNERS = ["owner1", "owner2"] + static final Pattern ANY_MESSAGE = Pattern.compile(".*") private static Path agentKeyFile private static final List skippableTests = new ArrayList<>() + private static volatile boolean itrEnabled = false def setupSpec() { def currentPath = Paths.get("").toAbsolutePath() @@ -65,19 +68,45 @@ abstract class CiVisibilityTest extends AgentTestRunner { def moduleExecutionSettingsFactory = Stub(ModuleExecutionSettingsFactory) moduleExecutionSettingsFactory.create(_, _) >> { Map properties = [ 
- (CiVisibilityConfig.CIVISIBILITY_ITR_ENABLED) : String.valueOf(!skippableTests.isEmpty()) + (CiVisibilityConfig.CIVISIBILITY_ITR_ENABLED): String.valueOf(itrEnabled) ] - return new ModuleExecutionSettings(properties, Collections.singletonMap(dummyModule, skippableTests)) + return new ModuleExecutionSettings(false, itrEnabled, properties, Collections.singletonMap(dummyModule, skippableTests), Collections.emptyList()) } - DDTestSessionImpl.SessionImplFactory sessionFactory = (String projectName, Path projectRoot, String component, Long startTime) -> { + def coverageProbeStoreFactory = new NoopCoverageProbeStore.NoopCoverageProbeStoreFactory() + DDTestFrameworkSession.Factory testFrameworkSessionFactory = (String projectName, Path projectRoot, String component, Long startTime) -> { + def ciTags = [(DUMMY_CI_TAG): DUMMY_CI_TAG_VALUE] + TestDecorator testDecorator = new TestDecoratorImpl(component, ciTags) + return new DDTestFrameworkSessionImpl( + projectName, + startTime, + Config.get(), + testDecorator, + sourcePathResolver, + codeowners, + methodLinesResolver, + coverageProbeStoreFactory, + moduleExecutionSettingsFactory, + ) + } + + InstrumentationBridge.registerTestEventsHandlerFactory { + component, path -> + DDTestFrameworkSession testSession = testFrameworkSessionFactory.startSession(dummyModule, path, component, null) + DDTestFrameworkModule testModule = testSession.testModuleStart(dummyModule, null) + new TestEventsHandlerImpl(testSession, testModule) + } + + DDBuildSystemSession.Factory buildSystemSessionFactory = (String projectName, Path projectRoot, String startCommand, String component, Long startTime) -> { def ciTags = [(DUMMY_CI_TAG): DUMMY_CI_TAG_VALUE] TestDecorator testDecorator = new TestDecoratorImpl(component, ciTags) TestModuleRegistry testModuleRegistry = new TestModuleRegistry() SignalServer signalServer = new SignalServer() RepoIndexBuilder repoIndexBuilder = Stub(RepoIndexBuilder) - return new DDTestSessionParent( + return new DDBuildSystemSessionImpl( projectName, + rootPath.toString(), + startCommand, startTime, Config.get(), testModuleRegistry, @@ -86,32 +115,28 @@ abstract class CiVisibilityTest extends AgentTestRunner { codeowners, methodLinesResolver, moduleExecutionSettingsFactory, + coverageProbeStoreFactory, signalServer, repoIndexBuilder ) } - InstrumentationBridge.registerTestEventsHandlerFactory { - component, path -> - DDTestSessionImpl testSession = sessionFactory.startSession(dummyModule, path, component, null) - DDTestModuleImpl testModule = testSession.testModuleStart(dummyModule, null) - new TestEventsHandlerImpl(testSession, testModule) - } - InstrumentationBridge.registerBuildEventsHandlerFactory { - decorator -> new BuildEventsHandlerImpl<>(sessionFactory, new JvmInfoFactory()) + decorator -> new BuildEventsHandlerImpl<>(buildSystemSessionFactory, new JvmInfoFactory()) } - InstrumentationBridge.registerCoverageProbeStoreFactory(new NoopCoverageProbeStore.NoopCoverageProbeStoreFactory()) + InstrumentationBridge.registerCoverageProbeStoreRegistry(coverageProbeStoreFactory) } @Override void setup() { skippableTests.clear() + itrEnabled = false } def givenSkippableTests(List tests) { skippableTests.addAll(tests) + itrEnabled = true } @Override @@ -137,7 +162,8 @@ abstract class CiVisibilityTest extends AgentTestRunner { final String resource = null, final String testCommand = null, final String testToolchain = null, - final Throwable exception = null) { + final Throwable exception = null, + final boolean verifyExceptionMessage = true) { def 
testFramework = expectedTestFramework() def testFrameworkVersion = expectedTestFrameworkVersion() @@ -157,6 +183,9 @@ abstract class CiVisibilityTest extends AgentTestRunner { "$Tags.TEST_TYPE" TestDecorator.TEST_TYPE if (testCommand) { "$Tags.TEST_COMMAND" testCommand + } else { + // the default command for sessions that run without build system instrumentation + "$Tags.TEST_COMMAND" dummyModule } if (testToolchain) { "$Tags.TEST_TOOLCHAIN" testToolchain @@ -171,7 +200,11 @@ abstract class CiVisibilityTest extends AgentTestRunner { } if (exception) { - errorTags(exception.class, exception.message) + if (verifyExceptionMessage) { + errorTags(exception.class, exception.message) + } else { + errorTags(exception.class, ANY_MESSAGE) + } } "$DUMMY_CI_TAG" DUMMY_CI_TAG_VALUE @@ -199,7 +232,8 @@ abstract class CiVisibilityTest extends AgentTestRunner { final String testStatus, final Map testTags = null, final Throwable exception = null, - final String resource = null) { + final String resource = null, + final boolean verifyExceptionMessage = true) { def testFramework = expectedTestFramework() def testFrameworkVersion = expectedTestFrameworkVersion() @@ -228,7 +262,11 @@ abstract class CiVisibilityTest extends AgentTestRunner { } if (exception) { - errorTags(exception.class, exception.message) + if (verifyExceptionMessage) { + errorTags(exception.class, exception.message) + } else { + errorTags(exception.class, ANY_MESSAGE) + } } "$DUMMY_CI_TAG" DUMMY_CI_TAG_VALUE @@ -329,7 +367,7 @@ abstract class CiVisibilityTest extends AgentTestRunner { return testSuiteId } - void testSpan(final TraceAssert trace, + long testSpan(final TraceAssert trace, final int index, final Long testSessionId, final Long testModuleId, @@ -342,11 +380,15 @@ abstract class CiVisibilityTest extends AgentTestRunner { final Throwable exception = null, final boolean emptyDuration = false, final Collection categories = null, - final boolean sourceFilePresent = true) { + final boolean sourceFilePresent = true, + final boolean sourceMethodPresent = true) { def testFramework = expectedTestFramework() def testFrameworkVersion = expectedTestFrameworkVersion() + def testId trace.span(index) { + testId = span.getSpanId() + parent() operationName expectedOperationPrefix() + ".test" resourceName "$testSuite.$testName" @@ -376,10 +418,13 @@ abstract class CiVisibilityTest extends AgentTestRunner { if (sourceFilePresent) { "$Tags.TEST_SOURCE_FILE" DUMMY_SOURCE_PATH + "$Tags.TEST_CODEOWNERS" Strings.toJson(DUMMY_CODE_OWNERS) + } + + if (sourceMethodPresent) { "$Tags.TEST_SOURCE_METHOD" testMethod "$Tags.TEST_SOURCE_START" DUMMY_TEST_METHOD_START "$Tags.TEST_SOURCE_END" DUMMY_TEST_METHOD_END - "$Tags.TEST_CODEOWNERS" Strings.toJson(DUMMY_CODE_OWNERS) } if (exception) { @@ -404,6 +449,7 @@ abstract class CiVisibilityTest extends AgentTestRunner { defaultTags() } } + return testId } String component = component() diff --git a/dd-java-agent/agent-debugger/build.gradle b/dd-java-agent/agent-debugger/build.gradle index 91c875c8bc4..762229e19cb 100644 --- a/dd-java-agent/agent-debugger/build.gradle +++ b/dd-java-agent/agent-debugger/build.gradle @@ -15,7 +15,6 @@ excludedClassesCoverage += [ 'com.datadog.debugger.agent.DebuggerProbe.When.Threshold', 'com.datadog.debugger.agent.DebuggerAgent.ShutdownHook', 'com.datadog.debugger.agent.DebuggerAgent', - 'com.datadog.debugger.uploader.BatchUploader', // too old for this coverage (JDK 1.2) 'antlr.*', 'com.datadog.debugger.util.MoshiSnapshotHelper' // only static classes diff --git 
a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/CapturedContext.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/CapturedContext.java index 7f122fa6264..d0b062f281c 100644 --- a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/CapturedContext.java +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/CapturedContext.java @@ -1,9 +1,12 @@ package datadog.trace.bootstrap.debugger; +import static datadog.trace.bootstrap.debugger.util.Redaction.REDACTED_VALUE; + import datadog.trace.bootstrap.debugger.el.ReflectiveFieldValueResolver; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; import datadog.trace.bootstrap.debugger.el.ValueReferences; import datadog.trace.bootstrap.debugger.el.Values; +import datadog.trace.bootstrap.debugger.util.Redaction; import datadog.trace.bootstrap.debugger.util.TimeoutChecker; import java.util.ArrayList; import java.util.Collections; @@ -106,6 +109,9 @@ public Object getMember(Object target, String memberName) { if (target == Values.UNDEFINED_OBJECT) { return target; } + if (Redaction.isRedactedKeyword(memberName)) { + return REDACTED_VALUE; + } if (target instanceof CapturedValue) { Map fields = ((CapturedValue) target).fields; if (fields.containsKey(memberName)) { @@ -318,12 +324,12 @@ public Status evaluate( if (methodLocation == MethodLocation.EXIT) { duration = System.nanoTime() - startTimestamp; addExtension( - ValueReferences.DURATION_EXTENSION_NAME, duration / 1000 / 1000); // convert to ms + ValueReferences.DURATION_EXTENSION_NAME, duration / 1_000_000.0); // convert to ms } this.thisClassName = thisClassName; boolean shouldEvaluate = resolveEvaluateAt(probeImplementation, methodLocation); if (shouldEvaluate) { - probeImplementation.evaluate(this, status); + probeImplementation.evaluate(this, status, methodLocation); } return status; } @@ -334,11 +340,7 @@ private static boolean resolveEvaluateAt( // line probe, no evaluation of probe's evaluateAt return true; } - MethodLocation localEvaluateAt = probeImplementation.getEvaluateAt(); - if (methodLocation == MethodLocation.ENTRY) { - return localEvaluateAt == MethodLocation.DEFAULT || localEvaluateAt == MethodLocation.ENTRY; - } - return localEvaluateAt == methodLocation; + return MethodLocation.isSame(methodLocation, probeImplementation.getEvaluateAt()); } public Status getStatus(String probeId) { @@ -537,6 +539,9 @@ public static CapturedValue raw( private static CapturedValue build( String name, String declaredType, Object value, Limits limits, String notCapturedReason) { + if (Redaction.isRedactedKeyword(name)) { + value = REDACTED_VALUE; + } CapturedValue val = new CapturedValue( name, declaredType, value, limits, Collections.emptyMap(), notCapturedReason); diff --git a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/DebuggerContext.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/DebuggerContext.java index 56a4f6723d0..2ec97949aad 100644 --- a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/DebuggerContext.java +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/DebuggerContext.java @@ -4,6 +4,7 @@ import datadog.trace.bootstrap.debugger.util.TimeoutChecker; import java.time.Duration; import 
java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -190,19 +191,9 @@ public static DebuggerSpan createSpan(String operationName, String[] tags) { * * @return true if can proceed to capture data */ - public static boolean isReadyToCapture(String... probeIds) { - // TODO provide overloaded version without string array + public static boolean isReadyToCapture(Class callingClass, String... probeIds) { try { - if (probeIds == null || probeIds.length == 0) { - return false; - } - boolean result = false; - for (String probeId : probeIds) { - // if all probes are rate limited, we don't capture - result |= ProbeRateLimiter.tryProbe(probeId); - } - result = result && checkAndSetInProbe(); - return result; + return checkAndSetInProbe(); } catch (Exception ex) { LOGGER.debug("Error in isReadyToCapture: ", ex); return false; @@ -269,6 +260,7 @@ public static void evalContext( public static void evalContextAndCommit( CapturedContext context, Class callingClass, int line, String... probeIds) { try { + List probeImplementations = new ArrayList<>(); for (String probeId : probeIds) { ProbeImplementation probeImplementation = resolveProbe(probeId, callingClass); if (probeImplementation == null) { @@ -276,6 +268,9 @@ public static void evalContextAndCommit( } context.evaluate( probeId, probeImplementation, callingClass.getTypeName(), -1, MethodLocation.DEFAULT); + probeImplementations.add(probeImplementation); + } + for (ProbeImplementation probeImplementation : probeImplementations) { probeImplementation.commit(context, line); } } catch (Exception ex) { diff --git a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/MethodLocation.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/MethodLocation.java index f07868286c4..73c5fe212c0 100644 --- a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/MethodLocation.java +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/MethodLocation.java @@ -3,5 +3,12 @@ public enum MethodLocation { DEFAULT, ENTRY, - EXIT + EXIT; + + public static boolean isSame(MethodLocation methodLocation, MethodLocation evaluateAt) { + if (methodLocation == MethodLocation.ENTRY) { + return evaluateAt == MethodLocation.DEFAULT || evaluateAt == MethodLocation.ENTRY; + } + return methodLocation == evaluateAt; + } } diff --git a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeImplementation.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeImplementation.java index 80445b61fec..617b3aa6093 100644 --- a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeImplementation.java +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeImplementation.java @@ -16,7 +16,8 @@ public interface ProbeImplementation { String getStrTags(); - void evaluate(CapturedContext context, CapturedContext.Status status); + void evaluate( + CapturedContext context, CapturedContext.Status status, MethodLocation methodLocation); void commit( CapturedContext entryContext, @@ -84,7 +85,8 @@ public String getStrTags() { } @Override - public void evaluate(CapturedContext context, CapturedContext.Status status) {} + public void evaluate( + 
CapturedContext context, CapturedContext.Status status, MethodLocation methodLocation) {} @Override public void commit( diff --git a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeRateLimiter.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeRateLimiter.java index 55258c35ca9..e7c7aa22f8d 100644 --- a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeRateLimiter.java +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/ProbeRateLimiter.java @@ -1,13 +1,19 @@ package datadog.trace.bootstrap.debugger; import datadog.trace.api.sampling.AdaptiveSampler; +import datadog.trace.api.sampling.ConstantSampler; +import datadog.trace.api.sampling.Sampler; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.function.DoubleFunction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Rate limiter for sending snapshot to backend Use a global rate limiter and one per probe */ public class ProbeRateLimiter { + private static final Logger LOGGER = LoggerFactory.getLogger(ProbeRateLimiter.class); public static final double DEFAULT_SNAPSHOT_RATE = 1.0; public static final double DEFAULT_LOG_RATE = 5000.0; private static final Duration ONE_SECOND_WINDOW = Duration.of(1, ChronoUnit.SECONDS); @@ -16,15 +22,14 @@ public class ProbeRateLimiter { private static final double DEFAULT_GLOBAL_LOG_RATE = 5000.0; private static final ConcurrentMap PROBE_SAMPLERS = new ConcurrentHashMap<>(); - private static AdaptiveSampler GLOBAL_SNAPSHOT_SAMPLER = - createSampler(DEFAULT_GLOBAL_SNAPSHOT_RATE); - private static AdaptiveSampler GLOBAL_LOG_SAMPLER = createSampler(DEFAULT_GLOBAL_LOG_RATE); + private static Sampler GLOBAL_SNAPSHOT_SAMPLER = createSampler(DEFAULT_GLOBAL_SNAPSHOT_RATE); + private static Sampler GLOBAL_LOG_SAMPLER = createSampler(DEFAULT_GLOBAL_LOG_RATE); + private static DoubleFunction samplerSupplier = ProbeRateLimiter::createSampler; public static boolean tryProbe(String probeId) { RateLimitInfo rateLimitInfo = - PROBE_SAMPLERS.computeIfAbsent( - probeId, k -> new RateLimitInfo(createSampler(DEFAULT_SNAPSHOT_RATE), true)); - AdaptiveSampler globalSampler = + PROBE_SAMPLERS.computeIfAbsent(probeId, ProbeRateLimiter::getDefaultRateLimitInfo); + Sampler globalSampler = rateLimitInfo.isCaptureSnapshot ? 
GLOBAL_SNAPSHOT_SAMPLER : GLOBAL_LOG_SAMPLER; if (globalSampler.sample()) { return rateLimitInfo.sampler.sample(); @@ -32,16 +37,21 @@ public static boolean tryProbe(String probeId) { return false; } + private static RateLimitInfo getDefaultRateLimitInfo(String probeId) { + LOGGER.debug("Setting sampling with default snapshot rate for probeId={}", probeId); + return new RateLimitInfo(samplerSupplier.apply(DEFAULT_SNAPSHOT_RATE), true); + } + public static void setRate(String probeId, double rate, boolean isCaptureSnapshot) { - PROBE_SAMPLERS.put(probeId, new RateLimitInfo(createSampler(rate), isCaptureSnapshot)); + PROBE_SAMPLERS.put(probeId, new RateLimitInfo(samplerSupplier.apply(rate), isCaptureSnapshot)); } public static void setGlobalSnapshotRate(double rate) { - GLOBAL_SNAPSHOT_SAMPLER = createSampler(rate); + GLOBAL_SNAPSHOT_SAMPLER = samplerSupplier.apply(rate); } public static void setGlobalLogRate(double rate) { - GLOBAL_LOG_SAMPLER = createSampler(rate); + GLOBAL_LOG_SAMPLER = samplerSupplier.apply(rate); } public static void resetRate(String probeId) { @@ -57,19 +67,27 @@ public static void resetAll() { resetGlobalRate(); } - private static AdaptiveSampler createSampler(double rate) { + public static void setSamplerSupplier(DoubleFunction samplerSupplier) { + ProbeRateLimiter.samplerSupplier = + samplerSupplier != null ? samplerSupplier : ProbeRateLimiter::createSampler; + } + + private static Sampler createSampler(double rate) { + if (rate < 0) { + return new ConstantSampler(true); + } if (rate < 1) { int intRate = (int) Math.round(rate * 10); - return new AdaptiveSampler(TEN_SECONDS_WINDOW, intRate, 180, 16); + return new AdaptiveSampler(TEN_SECONDS_WINDOW, intRate, 180, 16, true); } - return new AdaptiveSampler(ONE_SECOND_WINDOW, (int) Math.round(rate), 180, 16); + return new AdaptiveSampler(ONE_SECOND_WINDOW, (int) Math.round(rate), 180, 16, true); } private static class RateLimitInfo { - final AdaptiveSampler sampler; + final Sampler sampler; final boolean isCaptureSnapshot; - public RateLimitInfo(AdaptiveSampler sampler, boolean isCaptureSnapshot) { + public RateLimitInfo(Sampler sampler, boolean isCaptureSnapshot) { this.sampler = sampler; this.isCaptureSnapshot = isCaptureSnapshot; } diff --git a/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/util/Redaction.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/util/Redaction.java new file mode 100644 index 00000000000..41383d3c50a --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/util/Redaction.java @@ -0,0 +1,155 @@ +package datadog.trace.bootstrap.debugger.util; + +import datadog.trace.api.Config; +import datadog.trace.util.ClassNameTrie; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Pattern; + +public class Redaction { + // Need to be a unique instance (new String) for reference equality (==) and + // avoid internalization (intern) by the JVM because it's a string constant + public static final String REDACTED_VALUE = new String("redacted".toCharArray()); + + private static final Pattern COMMA_PATTERN = Pattern.compile(","); + private static final List PREDEFINED_KEYWORDS = + Arrays.asList( + "password", + "passwd", + "secret", + "apikey", + "auth", + "credentials", + "mysqlpwd", + "privatekey", + 
"token", + "ipaddress", + "session", + // django + "csrftoken", + "sessionid", + // wsgi + "remoteaddr", + "xcsrftoken", + "xforwardedfor", + "setcookie", + "cookie", + "authorization", + "xapikey", + "xforwardedfor", + "xrealip"); + private static final Set KEYWORDS = ConcurrentHashMap.newKeySet(); + private static ClassNameTrie typeTrie = ClassNameTrie.Builder.EMPTY_TRIE; + private static List redactedClasses; + private static List redactedPackages; + + static { + /* + * based on sentry list: https://github.com/getsentry/sentry-python/blob/fefb454287b771ac31db4e30fa459d9be2f977b8/sentry_sdk/scrubber.py#L17-L58 + */ + KEYWORDS.addAll(PREDEFINED_KEYWORDS); + } + + public static void addUserDefinedKeywords(Config config) { + String redactedIdentifiers = config.getDebuggerRedactedIdentifiers(); + if (redactedIdentifiers == null) { + return; + } + String[] identifiers = COMMA_PATTERN.split(redactedIdentifiers); + for (String identifier : identifiers) { + KEYWORDS.add(normalize(identifier)); + } + } + + public static void addUserDefinedTypes(Config config) { + String redactedTypes = config.getDebuggerRedactedTypes(); + if (redactedTypes == null) { + return; + } + List packages = null; + List classes = null; + ClassNameTrie.Builder builder = new ClassNameTrie.Builder(); + String[] types = COMMA_PATTERN.split(redactedTypes); + for (String type : types) { + builder.put(type, 1); + if (type.endsWith("*")) { + if (packages == null) { + packages = new ArrayList<>(); + } + type = + type.endsWith(".*") + ? type.substring(0, type.length() - 2) + : type.substring(0, type.length() - 1); + packages.add(type); + } else { + if (classes == null) { + classes = new ArrayList<>(); + } + classes.add(type); + } + } + typeTrie = builder.buildTrie(); + redactedPackages = packages; + redactedClasses = classes; + } + + public static boolean isRedactedKeyword(String name) { + if (name == null) { + return false; + } + name = normalize(name); + return KEYWORDS.contains(name); + } + + public static boolean isRedactedType(String className) { + if (className == null) { + return false; + } + return typeTrie.apply(className) > 0; + } + + public static List getRedactedPackages() { + return redactedPackages != null ? redactedPackages : Collections.emptyList(); + } + + public static List getRedactedClasses() { + return redactedClasses != null ? redactedClasses : Collections.emptyList(); + } + + public static void clearUserDefinedTypes() { + typeTrie = ClassNameTrie.Builder.EMPTY_TRIE; + } + + public static void resetUserDefinedKeywords() { + KEYWORDS.clear(); + KEYWORDS.addAll(PREDEFINED_KEYWORDS); + } + + private static String normalize(String name) { + StringBuilder sb = null; + for (int i = 0; i < name.length(); i++) { + char c = name.charAt(i); + boolean isUpper = Character.isUpperCase(c); + boolean isRemovable = isRemovableChar(c); + if (isUpper || isRemovable || sb != null) { + if (sb == null) { + sb = new StringBuilder(name.substring(0, i)); + } + if (isUpper) { + sb.append(Character.toLowerCase(c)); + } else if (!isRemovable) { + sb.append(c); + } + } + } + return sb != null ? 
sb.toString() : name; + } + + private static boolean isRemovableChar(char c) { + return c == '_' || c == '-' || c == '$' || c == '@'; + } +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/WellKnownClasses.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/util/WellKnownClasses.java similarity index 68% rename from dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/WellKnownClasses.java rename to dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/util/WellKnownClasses.java index 26758d40363..b5aa5a0b5fa 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/WellKnownClasses.java +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/main/java/datadog/trace/bootstrap/debugger/util/WellKnownClasses.java @@ -1,7 +1,9 @@ -package com.datadog.debugger.util; +package datadog.trace.bootstrap.debugger.util; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; +import java.util.Map; import java.util.Set; public class WellKnownClasses { @@ -57,4 +59,30 @@ public static boolean isToStringFinalSafe(String type) { public static boolean isToStringSafe(String concreteType) { return toStringSafeClasses.contains(concreteType); } + + /** + * @return true if collection is the implementation of size method is side effect free and O(1) + * complexity + */ + public static boolean isSizeSafe(Collection collection) { + String className = collection.getClass().getTypeName(); + if (className.startsWith("java.")) { + // All Collection implementations from JDK base module are considered as safe + return true; + } + return false; + } + + /** + * @return true if map is the implementation of size method is side effect free and O(1) + * complexity + */ + public static boolean isSizeSafe(Map map) { + String className = map.getClass().getTypeName(); + if (className.startsWith("java.")) { + // All Map implementations from JDK base module are considered as safe + return true; + } + return false; + } } diff --git a/dd-java-agent/agent-debugger/debugger-bootstrap/src/test/java/datadog/trace/bootstrap/debugger/util/RedactionTest.java b/dd-java-agent/agent-debugger/debugger-bootstrap/src/test/java/datadog/trace/bootstrap/debugger/util/RedactionTest.java new file mode 100644 index 00000000000..243b65f899f --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-bootstrap/src/test/java/datadog/trace/bootstrap/debugger/util/RedactionTest.java @@ -0,0 +1,60 @@ +package datadog.trace.bootstrap.debugger.util; + +import static org.junit.jupiter.api.Assertions.*; + +import datadog.trace.api.Config; +import java.lang.reflect.Field; +import org.junit.jupiter.api.Test; + +class RedactionTest { + + @Test + public void basic() { + assertFalse(Redaction.isRedactedKeyword(null)); + assertFalse(Redaction.isRedactedKeyword("")); + assertFalse(Redaction.isRedactedKeyword("foobar")); + assertFalse(Redaction.isRedactedKeyword("@-_$")); + assertTrue(Redaction.isRedactedKeyword("password")); + assertTrue(Redaction.isRedactedKeyword("PassWord")); + assertTrue(Redaction.isRedactedKeyword("pass-word")); + assertTrue(Redaction.isRedactedKeyword("_Pass-Word_")); + assertTrue(Redaction.isRedactedKeyword("$pass_worD")); + assertTrue(Redaction.isRedactedKeyword("@passWord@")); + } + + @Test + public void userDefinedKeywords() { + Config config = Config.get(); + setFieldInConfig(config, "debuggerRedactedIdentifiers", "_MotDePasse,$Passwort"); + try { + 
Redaction.addUserDefinedKeywords(config); + assertTrue(Redaction.isRedactedKeyword("mot-de-passe")); + assertTrue(Redaction.isRedactedKeyword("Passwort")); + } finally { + Redaction.resetUserDefinedKeywords(); + } + } + + @Test + public void userDefinedTypes() { + Config config = Config.get(); + setFieldInConfig(config, "debuggerRedactedTypes", "java.security.Security,javax.security.*"); + try { + Redaction.addUserDefinedTypes(Config.get()); + assertTrue(Redaction.isRedactedType("java.security.Security")); + assertTrue(Redaction.isRedactedType("javax.security.SecurityContext")); + } finally { + Redaction.clearUserDefinedTypes(); + } + } + + private static void setFieldInConfig(Config config, String fieldName, Object value) { + try { + Field field = config.getClass().getDeclaredField(fieldName); + field.setAccessible(true); + field.set(config, value); + } catch (Throwable e) { + e.printStackTrace(); + } + } +} diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/BooleanValueExpressionAdapter.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/BooleanValueExpressionAdapter.java new file mode 100644 index 00000000000..5637e11c76c --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/BooleanValueExpressionAdapter.java @@ -0,0 +1,30 @@ +package com.datadog.debugger.el; + +import com.datadog.debugger.el.expressions.BooleanExpression; +import com.datadog.debugger.el.expressions.ValueExpression; +import com.datadog.debugger.el.values.BooleanValue; +import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; + +public class BooleanValueExpressionAdapter implements ValueExpression { + + private final BooleanExpression booleanExpression; + + public BooleanValueExpressionAdapter(BooleanExpression booleanExpression) { + this.booleanExpression = booleanExpression; + } + + @Override + public BooleanValue evaluate(ValueReferenceResolver valueRefResolver) { + Boolean result = booleanExpression.evaluate(valueRefResolver); + if (result == null) { + throw new EvaluationException( + "Boolean expression returning null", PrettyPrintVisitor.print(this)); + } + return new BooleanValue(result); + } + + @Override + public R accept(Visitor visitor) { + return visitor.visit(booleanExpression); + } +} diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/DSL.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/DSL.java index e8943e8c0e7..c23704ec325 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/DSL.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/DSL.java @@ -16,6 +16,7 @@ import com.datadog.debugger.el.expressions.IndexExpression; import com.datadog.debugger.el.expressions.IsEmptyExpression; import com.datadog.debugger.el.expressions.LenExpression; +import com.datadog.debugger.el.expressions.MatchesExpression; import com.datadog.debugger.el.expressions.NotExpression; import com.datadog.debugger.el.expressions.StartsWithExpression; import com.datadog.debugger.el.expressions.StringPredicateExpression; @@ -192,7 +193,16 @@ public static StringPredicateExpression contains( return new ContainsExpression(valueExpression, str); } + public static StringPredicateExpression matches( + ValueExpression valueExpression, StringValue str) { + return new MatchesExpression(valueExpression, str); + } + public static WhenExpression when(BooleanExpression 
expression) { return new WhenExpression(expression); } + + public static BooleanValueExpressionAdapter bool(BooleanExpression expression) { + return new BooleanValueExpressionAdapter(expression); + } } diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/JsonToExpressionConverter.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/JsonToExpressionConverter.java index a69a79cadef..6f6eee3b1bb 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/JsonToExpressionConverter.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/JsonToExpressionConverter.java @@ -12,12 +12,42 @@ import com.squareup.moshi.JsonReader; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.function.BiFunction; /** Converts json representation to object model */ public class JsonToExpressionConverter { + private static final Set PREDICATE_FUNCTIONS = + new HashSet<>( + Arrays.asList( + "not", + "==", + "eq", + "!=", + "neq", + "ne", + ">=", + "ge", + ">", + "gt", + "<=", + "le", + "<", + "lt", + "or", + "and", + "hasAny", + "hasAll", + "isEmpty", + "startsWith", + "endsWith", + "contains", + "matches")); + @FunctionalInterface interface BinaryPredicateExpressionFunction { BooleanExpression apply(T left, T right); @@ -31,7 +61,13 @@ interface CompositePredicateExpressionFunction { public static BooleanExpression createPredicate(JsonReader reader) throws IOException { reader.beginObject(); String predicateType = reader.nextName(); - BooleanExpression expr = null; + BooleanExpression expr = internalCreatePredicate(reader, predicateType); + reader.endObject(); + return expr; + } + + private static BooleanExpression internalCreatePredicate(JsonReader reader, String predicateType) + throws IOException { switch (predicateType) { case "not": { @@ -40,8 +76,7 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce throw new UnsupportedOperationException( "Operation 'not' expects a predicate as its argument"); } - expr = DSL.not(createPredicate(reader)); - break; + return DSL.not(createPredicate(reader)); } case "==": case "eq": @@ -52,9 +87,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'eq' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createBinaryValuePredicate(reader, DSL::eq); + BooleanExpression expr = createBinaryValuePredicate(reader, DSL::eq); reader.endArray(); - break; + return expr; } case "!=": case "neq": @@ -66,9 +101,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'ne' expects the arguments to be defined as array"); } reader.beginArray(); - expr = DSL.not(createBinaryValuePredicate(reader, DSL::eq)); + BooleanExpression expr = DSL.not(createBinaryValuePredicate(reader, DSL::eq)); reader.endArray(); - break; + return expr; } case ">=": case "ge": @@ -79,9 +114,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'ge' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createBinaryValuePredicate(reader, DSL::ge); + BooleanExpression expr = createBinaryValuePredicate(reader, DSL::ge); reader.endArray(); - break; + return expr; } case ">": case "gt": @@ -92,9 +127,9 @@ public static BooleanExpression createPredicate(JsonReader 
reader) throws IOExce "Operation 'gt' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createBinaryValuePredicate(reader, DSL::gt); + BooleanExpression expr = createBinaryValuePredicate(reader, DSL::gt); reader.endArray(); - break; + return expr; } case "<=": case "le": @@ -105,9 +140,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'le' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createBinaryValuePredicate(reader, DSL::le); + BooleanExpression expr = createBinaryValuePredicate(reader, DSL::le); reader.endArray(); - break; + return expr; } case "<": case "lt": @@ -118,9 +153,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'lt' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createBinaryValuePredicate(reader, DSL::lt); + BooleanExpression expr = createBinaryValuePredicate(reader, DSL::lt); reader.endArray(); - break; + return expr; } case "or": { @@ -130,9 +165,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'or' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createCompositeLogicalPredicate(reader, DSL::or); + BooleanExpression expr = createCompositeLogicalPredicate(reader, DSL::or); reader.endArray(); - break; + return expr; } case "and": { @@ -142,9 +177,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'and' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createCompositeLogicalPredicate(reader, DSL::and); + BooleanExpression expr = createCompositeLogicalPredicate(reader, DSL::and); reader.endArray(); - break; + return expr; } case "hasAny": { @@ -154,9 +189,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'hasAny' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createHasAnyPredicate(reader); + BooleanExpression expr = createHasAnyPredicate(reader); reader.endArray(); - break; + return expr; } case "hasAll": { @@ -166,9 +201,9 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce "Operation 'hasAll' expects the arguments to be defined as array"); } reader.beginArray(); - expr = createHasAllPredicate(reader); + BooleanExpression expr = createHasAllPredicate(reader); reader.endArray(); - break; + return expr; } case "isEmpty": { @@ -177,29 +212,27 @@ public static BooleanExpression createPredicate(JsonReader reader) throws IOExce throw new UnsupportedOperationException( "Operation 'isEmpty' expects exactly one value argument"); } - expr = DSL.isEmpty(asValueExpression(reader)); - break; + return DSL.isEmpty(asValueExpression(reader)); } case "startsWith": { - expr = createStringPredicateExpression(reader, DSL::startsWith); - break; + return createStringPredicateExpression(reader, DSL::startsWith); } case "endsWith": { - expr = createStringPredicateExpression(reader, DSL::endsWith); - break; + return createStringPredicateExpression(reader, DSL::endsWith); } case "contains": { - expr = createStringPredicateExpression(reader, DSL::contains); - break; + return createStringPredicateExpression(reader, DSL::contains); + } + case "matches": + { + return createStringPredicateExpression(reader, DSL::matches); } default: throw new UnsupportedOperationException("Unsupported operation '" + predicateType + "'"); } - reader.endObject(); - return 
expr; } public static BooleanExpression createHasAnyPredicate(JsonReader reader) throws IOException { @@ -237,8 +270,8 @@ public static BooleanExpression createCompositeLogicalPredicate( } public static ValueExpression asValueExpression(JsonReader reader) throws IOException { - ValueExpression value; - switch (reader.peek()) { + JsonReader.Token currentToken = reader.peek(); + switch (currentToken) { case NUMBER: { // Moshi always consider numbers as decimal. need to parse it as string and detect if dot @@ -246,23 +279,23 @@ public static ValueExpression asValueExpression(JsonReader reader) throws IOE // or not to determine ints/longs vs doubles String numberStrValue = reader.nextString(); if (numberStrValue.indexOf('.') > 0) { - value = DSL.value(Double.parseDouble(numberStrValue)); - } else { - value = DSL.value(Long.parseLong(numberStrValue)); + return DSL.value(Double.parseDouble(numberStrValue)); } - break; + return DSL.value(Long.parseLong(numberStrValue)); } case STRING: { String textValue = reader.nextString(); - value = DSL.value(textValue); - break; + return DSL.value(textValue); } case BEGIN_OBJECT: { reader.beginObject(); try { String fieldName = reader.nextName(); + if (PREDICATE_FUNCTIONS.contains(fieldName)) { + return DSL.bool(internalCreatePredicate(reader, fieldName)); + } switch (fieldName) { case "ref": { @@ -345,13 +378,12 @@ public static ValueExpression asValueExpression(JsonReader reader) throws IOE case NULL: { reader.nextNull(); - value = DSL.nullValue(); - break; + return DSL.nullValue(); } default: - throw new UnsupportedOperationException("Invalid value definition: "); + throw new UnsupportedOperationException( + "Invalid value definition, not supported token: " + currentToken); } - return value; } private static StringPredicateExpression createStringPredicateExpression( diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/RedactedException.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/RedactedException.java new file mode 100644 index 00000000000..a2ec6cdb02d --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/RedactedException.java @@ -0,0 +1,7 @@ +package com.datadog.debugger.el; + +public class RedactedException extends EvaluationException { + public RedactedException(String message, String expr) { + super(message, expr); + } +} diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/ValueScript.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/ValueScript.java index f35ac5f50ce..bcc940e1d5d 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/ValueScript.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/ValueScript.java @@ -1,9 +1,11 @@ package com.datadog.debugger.el; import com.datadog.debugger.el.expressions.GetMemberExpression; +import com.datadog.debugger.el.expressions.IndexExpression; import com.datadog.debugger.el.expressions.LenExpression; import com.datadog.debugger.el.expressions.ValueExpression; import com.datadog.debugger.el.expressions.ValueRefExpression; +import com.datadog.debugger.el.values.StringValue; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.JsonReader; import com.squareup.moshi.JsonWriter; @@ -131,6 +133,14 @@ public void toJson(JsonWriter jsonWriter, ValueScript value) throws IOException private void writeValueExpression(JsonWriter 
jsonWriter, ValueExpression expr) throws IOException { + if (expr instanceof Value) { + if (expr instanceof StringValue) { + jsonWriter.value(((StringValue) expr).getValue()); + } else { + throw new IOException("Unsupported operation: " + expr.getClass().getTypeName()); + } + return; + } jsonWriter.beginObject(); if (expr instanceof ValueRefExpression) { ValueRefExpression valueRefExpr = (ValueRefExpression) expr; @@ -146,6 +156,13 @@ private void writeValueExpression(JsonWriter jsonWriter, ValueExpression expr } else if (expr instanceof LenExpression) { jsonWriter.name("count"); writeValueExpression(jsonWriter, ((LenExpression) expr).getSource()); + } else if (expr instanceof IndexExpression) { + IndexExpression idxExpr = (IndexExpression) expr; + jsonWriter.name("index"); + jsonWriter.beginArray(); + writeValueExpression(jsonWriter, idxExpr.getTarget()); + writeValueExpression(jsonWriter, idxExpr.getKey()); + jsonWriter.endArray(); } else { throw new IOException("Unsupported operation: " + expr.getClass().getTypeName()); } diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ExpressionHelper.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ExpressionHelper.java new file mode 100644 index 00000000000..43d5930dcb6 --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ExpressionHelper.java @@ -0,0 +1,13 @@ +package com.datadog.debugger.el.expressions; + +import com.datadog.debugger.el.Expression; +import com.datadog.debugger.el.PrettyPrintVisitor; +import com.datadog.debugger.el.RedactedException; + +public class ExpressionHelper { + public static void throwRedactedException(Expression expr) { + String strExpr = PrettyPrintVisitor.print(expr); + throw new RedactedException( + "Could not evaluate the expression because '" + strExpr + "' was redacted", strExpr); + } +} diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/GetMemberExpression.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/GetMemberExpression.java index 0ca2e3e35f4..4b4cbbedba3 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/GetMemberExpression.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/GetMemberExpression.java @@ -2,9 +2,11 @@ import com.datadog.debugger.el.EvaluationException; import com.datadog.debugger.el.Generated; +import com.datadog.debugger.el.PrettyPrintVisitor; import com.datadog.debugger.el.Value; import com.datadog.debugger.el.Visitor; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; +import datadog.trace.bootstrap.debugger.util.Redaction; import java.util.Objects; public class GetMemberExpression implements ValueExpression> { @@ -22,11 +24,17 @@ public Value evaluate(ValueReferenceResolver valueRefResolver) { if (targetValue == Value.undefined()) { return targetValue; } + Object member; try { - return Value.of(valueRefResolver.getMember(targetValue.getValue(), memberName)); + member = valueRefResolver.getMember(targetValue.getValue(), memberName); } catch (RuntimeException ex) { - throw new EvaluationException(ex.getMessage(), memberName, ex); + throw new EvaluationException(ex.getMessage(), PrettyPrintVisitor.print(this), ex); } + if (member == Redaction.REDACTED_VALUE + || (member != null && 
Redaction.isRedactedType(member.getClass().getTypeName()))) { + ExpressionHelper.throwRedactedException(this); + } + return Value.of(member); } @Generated diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/IndexExpression.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/IndexExpression.java index 7cd825bae50..d35be25572e 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/IndexExpression.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/IndexExpression.java @@ -7,6 +7,7 @@ import com.datadog.debugger.el.values.ListValue; import com.datadog.debugger.el.values.MapValue; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; +import datadog.trace.bootstrap.debugger.util.Redaction; public class IndexExpression implements ValueExpression> { @@ -31,7 +32,12 @@ public Value evaluate(ValueReferenceResolver valueRefResolver) { } try { if (targetValue instanceof MapValue) { - result = ((MapValue) targetValue).get(keyValue.getValue()); + Object objKey = keyValue.getValue(); + if (objKey instanceof String && Redaction.isRedactedKeyword((String) objKey)) { + ExpressionHelper.throwRedactedException(this); + } else { + result = ((MapValue) targetValue).get(objKey); + } } if (targetValue instanceof ListValue) { result = ((ListValue) targetValue).get(keyValue.getValue()); @@ -39,6 +45,10 @@ public Value evaluate(ValueReferenceResolver valueRefResolver) { } catch (IllegalArgumentException ex) { throw new EvaluationException(ex.getMessage(), PrettyPrintVisitor.print(this), ex); } + Object obj = result.getValue(); + if (obj != null && Redaction.isRedactedType(obj.getClass().getTypeName())) { + ExpressionHelper.throwRedactedException(this); + } return result; } diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/LenExpression.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/LenExpression.java index e6029a9c828..669a00f5cd4 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/LenExpression.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/LenExpression.java @@ -1,5 +1,7 @@ package com.datadog.debugger.el.expressions; +import com.datadog.debugger.el.EvaluationException; +import com.datadog.debugger.el.PrettyPrintVisitor; import com.datadog.debugger.el.Value; import com.datadog.debugger.el.Visitor; import com.datadog.debugger.el.values.CollectionValue; @@ -27,14 +29,18 @@ public LenExpression(ValueExpression source) { @Override public Value evaluate(ValueReferenceResolver valueRefResolver) { Value materialized = source == null ? 
Value.nullValue() : source.evaluate(valueRefResolver); - if (materialized.isNull()) { - return (NumericValue) Value.of(-1); - } else if (materialized.isUndefined()) { - return (NumericValue) Value.of(0); - } else if (materialized instanceof StringValue) { - return (NumericValue) Value.of(((StringValue) materialized).length()); - } else if (materialized instanceof CollectionValue) { - return (NumericValue) Value.of(((CollectionValue) materialized).count()); + try { + if (materialized.isNull()) { + return (NumericValue) Value.of(-1); + } else if (materialized.isUndefined()) { + return (NumericValue) Value.of(0); + } else if (materialized instanceof StringValue) { + return (NumericValue) Value.of(((StringValue) materialized).length()); + } else if (materialized instanceof CollectionValue) { + return (NumericValue) Value.of(((CollectionValue) materialized).count()); + } + } catch (RuntimeException ex) { + throw new EvaluationException(ex.getMessage(), PrettyPrintVisitor.print(this)); } log.warn("Can not compute length for {}", materialized); return Value.undefined(); diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ValueRefExpression.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ValueRefExpression.java index 11dabc9116e..75a82b54f48 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ValueRefExpression.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/expressions/ValueRefExpression.java @@ -2,9 +2,11 @@ import com.datadog.debugger.el.EvaluationException; import com.datadog.debugger.el.Generated; +import com.datadog.debugger.el.PrettyPrintVisitor; import com.datadog.debugger.el.Value; import com.datadog.debugger.el.Visitor; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; +import datadog.trace.bootstrap.debugger.util.Redaction; import java.util.Objects; /** An expression taking a reference path and resolving to {@linkplain Value} */ @@ -17,11 +19,17 @@ public ValueRefExpression(String symbolName) { @Override public Value evaluate(ValueReferenceResolver valueRefResolver) { + Object symbol; try { - return Value.of(valueRefResolver.lookup(symbolName)); + symbol = valueRefResolver.lookup(symbolName); } catch (RuntimeException ex) { - throw new EvaluationException(ex.getMessage(), symbolName); + throw new EvaluationException(ex.getMessage(), PrettyPrintVisitor.print(this)); } + if (symbol == Redaction.REDACTED_VALUE + || (symbol != null && Redaction.isRedactedType(symbol.getClass().getTypeName()))) { + ExpressionHelper.throwRedactedException(this); + } + return Value.of(symbol); } @Generated diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/ListValue.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/ListValue.java index a2b5b032f9e..88dd5c71614 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/ListValue.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/ListValue.java @@ -5,6 +5,7 @@ import com.datadog.debugger.el.expressions.ValueExpression; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; import datadog.trace.bootstrap.debugger.el.Values; +import datadog.trace.bootstrap.debugger.util.WellKnownClasses; import java.lang.reflect.Array; import java.util.Collection; import java.util.List; 
@@ -68,7 +69,12 @@ public boolean isEmpty() { public int count() { if (listHolder instanceof Collection) { - return ((Collection) listHolder).size(); + if (WellKnownClasses.isSizeSafe((Collection) listHolder)) { + return ((Collection) listHolder).size(); + } else { + throw new RuntimeException( + "Unsupported Collection class: " + listHolder.getClass().getTypeName()); + } } else if (listHolder == Value.nullValue()) { return 0; } else if (arrayHolder != null) { diff --git a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/MapValue.java b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/MapValue.java index 09ea8b36a2f..614179fc4a3 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/MapValue.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/main/java/com/datadog/debugger/el/values/MapValue.java @@ -5,6 +5,7 @@ import com.datadog.debugger.el.expressions.ValueExpression; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; import datadog.trace.bootstrap.debugger.el.Values; +import datadog.trace.bootstrap.debugger.util.WellKnownClasses; import java.util.Collections; import java.util.Map; import java.util.Set; @@ -53,7 +54,11 @@ public boolean isEmpty() { public int count() { if (mapHolder instanceof Map) { - return ((Map) mapHolder).size(); + if (WellKnownClasses.isSizeSafe((Map) mapHolder)) { + return ((Map) mapHolder).size(); + } else { + throw new RuntimeException("Unsupported Map class: " + mapHolder.getClass().getTypeName()); + } } else if (mapHolder == Value.nullValue()) { return 0; } diff --git a/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/BooleanValueExpressionAdapterTest.java b/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/BooleanValueExpressionAdapterTest.java new file mode 100644 index 00000000000..3b793ecb1d6 --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/BooleanValueExpressionAdapterTest.java @@ -0,0 +1,50 @@ +package com.datadog.debugger.el; + +import static org.junit.jupiter.api.Assertions.*; + +import com.datadog.debugger.el.expressions.BooleanExpression; +import com.datadog.debugger.el.values.BooleanValue; +import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; +import org.junit.jupiter.api.Test; + +class BooleanValueExpressionAdapterTest { + + @Test + public void testLiteral() { + { + BooleanValueExpressionAdapter booleanValueExpressionAdapter = + new BooleanValueExpressionAdapter(BooleanExpression.TRUE); + BooleanValue resultValue = booleanValueExpressionAdapter.evaluate(null); + assertTrue(resultValue.getValue()); + } + { + BooleanValueExpressionAdapter booleanValueExpressionAdapter = + new BooleanValueExpressionAdapter(BooleanExpression.FALSE); + BooleanValue resultValue = booleanValueExpressionAdapter.evaluate(null); + assertFalse(resultValue.getValue()); + } + } + + @Test + public void testExpression() { + BooleanValueExpressionAdapter booleanValueExpressionAdapter = + new BooleanValueExpressionAdapter(DSL.eq(DSL.value(1), DSL.value(1))); + BooleanValue resultValue = booleanValueExpressionAdapter.evaluate(null); + assertTrue(resultValue.getValue()); + } + + @Test + public void testNull() { + BooleanValueExpressionAdapter booleanValueExpressionAdapter = + new BooleanValueExpressionAdapter( + new BooleanExpression() { + @Override + public Boolean evaluate(ValueReferenceResolver valueRefResolver) { + return 
null; + } + }); + EvaluationException ex = + assertThrows(EvaluationException.class, () -> booleanValueExpressionAdapter.evaluate(null)); + assertEquals("Boolean expression returning null", ex.getMessage()); + } +} diff --git a/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/ProbeConditionTest.java b/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/ProbeConditionTest.java index 5a54a75877a..c9dd388426e 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/ProbeConditionTest.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/ProbeConditionTest.java @@ -180,6 +180,19 @@ void testIncorrectSyntax() { assertEquals("Unsupported operation 'gte'", ex.getMessage()); } + @Test + void redaction() throws IOException { + ProbeCondition probeCondition = load("/test_conditional_09.json"); + Map args = new HashMap<>(); + args.put("password", "secret123"); + ValueReferenceResolver ctx = RefResolverHelper.createResolver(args, null, null); + EvaluationException evaluationException = + assertThrows(EvaluationException.class, () -> probeCondition.execute(ctx)); + assertEquals( + "Could not evaluate the expression because 'password' was redacted", + evaluationException.getMessage()); + } + private static ProbeCondition load(String resourcePath) throws IOException { InputStream input = ProbeConditionTest.class.getResourceAsStream(resourcePath); Moshi moshi = diff --git a/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/expressions/ValueRefExpressionTest.java b/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/expressions/ValueRefExpressionTest.java index 4048f3293b7..be89476be39 100644 --- a/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/expressions/ValueRefExpressionTest.java +++ b/dd-java-agent/agent-debugger/debugger-el/src/test/java/com/datadog/debugger/el/expressions/ValueRefExpressionTest.java @@ -5,6 +5,7 @@ import static org.junit.jupiter.api.Assertions.*; import com.datadog.debugger.el.DSL; +import com.datadog.debugger.el.EvaluationException; import com.datadog.debugger.el.RefResolverHelper; import com.datadog.debugger.el.Value; import datadog.trace.bootstrap.debugger.el.ValueReferenceResolver; @@ -99,4 +100,24 @@ void contextRef() { assertEquals("Cannot find synthetic var: invalid", runtimeException.getMessage()); assertEquals("@invalid", print(invalidExpression)); } + + @Test + public void redacted() { + ValueRefExpression valueRef = new ValueRefExpression("password"); + class StoreSecret { + String password; + + public StoreSecret(String password) { + this.password = password; + } + } + StoreSecret instance = new StoreSecret("secret123"); + EvaluationException evaluationException = + assertThrows( + EvaluationException.class, + () -> valueRef.evaluate(RefResolverHelper.createResolver(instance))); + assertEquals( + "Could not evaluate the expression because 'password' was redacted", + evaluationException.getMessage()); + } } diff --git a/dd-java-agent/agent-debugger/debugger-el/src/test/resources/test_conditional_09.json b/dd-java-agent/agent-debugger/debugger-el/src/test/resources/test_conditional_09.json new file mode 100644 index 00000000000..11946a4ee69 --- /dev/null +++ b/dd-java-agent/agent-debugger/debugger-el/src/test/resources/test_conditional_09.json @@ -0,0 +1,6 @@ +{ + "dsl": "password == 'secret123'", + "json": { + "eq": [{"ref": "password"}, "secret123"] + } +} 
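Note on the redaction tests above: the new test_conditional_09.json resource and the ValueRefExpressionTest/ProbeConditionTest cases exercise the path where a condition references a redacted identifier such as "password" and evaluation aborts with "Could not evaluate the expression because 'password' was redacted". The short sketch below is illustrative only and is not part of this patch; the class name and the two-keyword set are assumptions made for the example. It mirrors the matching rule implemented by Redaction.normalize in this change set: lower-case the identifier and drop '_', '-', '$' and '@' before looking it up in the keyword set.

// Illustrative sketch only (assumed names, not part of the patch):
// shows how identifier normalization makes differently-styled names
// ("_Pass-Word_", "@api_key") hit the same redaction keyword.
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RedactionNormalizeSketch {
  // Two keywords taken from the predefined list in Redaction above.
  private static final Set<String> KEYWORDS =
      new HashSet<>(Arrays.asList("password", "apikey"));

  static String normalize(String name) {
    StringBuilder sb = new StringBuilder(name.length());
    for (int i = 0; i < name.length(); i++) {
      char c = name.charAt(i);
      if (c == '_' || c == '-' || c == '$' || c == '@') {
        continue; // removable characters are dropped entirely
      }
      sb.append(Character.toLowerCase(c)); // upper-case letters are folded to lower case
    }
    return sb.toString();
  }

  static boolean isRedactedKeyword(String name) {
    return name != null && KEYWORDS.contains(normalize(name));
  }

  public static void main(String[] args) {
    System.out.println(isRedactedKeyword("_Pass-Word_")); // true -> "password"
    System.out.println(isRedactedKeyword("@api_key"));    // true -> "apikey"
    System.out.println(isRedactedKeyword("username"));    // false, not a keyword
  }
}

Dropping the removable characters (rather than treating them as separators) appears to be what lets snake_case, kebab-case and prefixed variants of the same identifier collapse onto a single canonical keyword, so one entry in the list covers the common naming conventions.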
diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/ConfigurationUpdater.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/ConfigurationUpdater.java index 98334a2764a..533d78e2a83 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/ConfigurationUpdater.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/ConfigurationUpdater.java @@ -40,7 +40,8 @@ public interface TransformerSupplier { DebuggerTransformer supply( Config tracerConfig, Configuration configuration, - DebuggerTransformer.InstrumentationListener listener); + DebuggerTransformer.InstrumentationListener listener, + DebuggerSink debuggerSink); } private static final Logger LOGGER = LoggerFactory.getLogger(ConfigurationUpdater.class); @@ -103,13 +104,14 @@ private void applyNewConfiguration(Configuration newConfiguration) { ConfigurationComparer changes = new ConfigurationComparer(currentConfiguration, newConfiguration, instrumentationResults); currentConfiguration = newConfiguration; + if (changes.hasRateLimitRelatedChanged()) { + // apply rate limit config first to avoid racing with execution/instrumentation of log probes + applyRateLimiter(changes); + } if (changes.hasProbeRelatedChanges()) { LOGGER.info("Applying new probe configuration, changes: {}", changes); handleProbesChanges(changes); } - if (changes.hasRateLimitRelatedChanged()) { - applyRateLimiter(changes); - } } private Configuration applyConfigurationFilters(Configuration configuration) { @@ -175,13 +177,16 @@ private void installNewDefinitions() { // install new probe definitions currentTransformer = transformerSupplier.supply( - Config.get(), currentConfiguration, this::recordInstrumentationProgress); + Config.get(), currentConfiguration, this::recordInstrumentationProgress, sink); instrumentation.addTransformer(currentTransformer, true); LOGGER.debug("New transformer installed"); } private void recordInstrumentationProgress( ProbeDefinition definition, InstrumentationResult instrumentationResult) { + if (instrumentationResult.isError()) { + return; + } instrumentationResults.put(definition.getId(), instrumentationResult); if (instrumentationResult.isInstalled()) { sink.addInstalled(definition.getProbeId()); @@ -241,10 +246,6 @@ public ProbeImplementation resolve(String id, Class callingClass) { } private void applyRateLimiter(ConfigurationComparer changes) { - Collection probes = currentConfiguration.getLogProbes(); - if (probes == null) { - return; - } // ensure rate is up-to-date for all new probes for (ProbeDefinition addedDefinitions : changes.getAddedDefinitions()) { if (addedDefinitions instanceof LogProbe) { diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerAgent.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerAgent.java index c8a4238e6ad..7d4a888805d 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerAgent.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerAgent.java @@ -4,6 +4,7 @@ import com.datadog.debugger.sink.DebuggerSink; import com.datadog.debugger.sink.Sink; +import com.datadog.debugger.symbol.SymbolExtractionTransformer; import com.datadog.debugger.uploader.BatchUploader; import datadog.communication.ddagent.DDAgentFeaturesDiscovery; import datadog.communication.ddagent.SharedCommunicationObjects; @@ -12,6 +13,7 @@ import datadog.remoteconfig.SizeCheckedInputStream; import 
datadog.trace.api.Config; import datadog.trace.bootstrap.debugger.DebuggerContext; +import datadog.trace.bootstrap.debugger.util.Redaction; import java.io.ByteArrayOutputStream; import java.io.FileInputStream; import java.io.IOException; @@ -21,7 +23,6 @@ import java.lang.ref.WeakReference; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Objects; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,14 +44,11 @@ public static synchronized void run( log.info("Starting Dynamic Instrumentation"); ClassesToRetransformFinder classesToRetransformFinder = new ClassesToRetransformFinder(); setupSourceFileTracking(instrumentation, classesToRetransformFinder); - String finalDebuggerSnapshotUrl = config.getFinalDebuggerSnapshotUrl(); - String agentUrl = config.getAgentUrl(); - boolean isSnapshotUploadThroughAgent = Objects.equals(finalDebuggerSnapshotUrl, agentUrl); - + Redaction.addUserDefinedKeywords(config); + Redaction.addUserDefinedTypes(config); DDAgentFeaturesDiscovery ddAgentFeaturesDiscovery = sco.featuresDiscovery(config); ddAgentFeaturesDiscovery.discoverIfOutdated(); agentVersion = ddAgentFeaturesDiscovery.getVersion(); - DebuggerSink debuggerSink = new DebuggerSink(config); debuggerSink.start(); ConfigurationUpdater configurationUpdater = @@ -68,21 +66,18 @@ public static synchronized void run( DebuggerContext.initValueSerializer(snapshotSerializer); DebuggerContext.initTracer(new DebuggerTracer()); if (config.isDebuggerInstrumentTheWorld()) { - setupInstrumentTheWorldTransformer(config, instrumentation, sink, statsdMetricForwarder); + setupInstrumentTheWorldTransformer( + config, instrumentation, debuggerSink, statsdMetricForwarder); } - String probeFileLocation = config.getDebuggerProbeFileLocation(); - if (probeFileLocation != null) { Path probeFilePath = Paths.get(probeFileLocation); loadFromFile(probeFilePath, configurationUpdater, config.getDebuggerMaxPayloadSize()); return; } - configurationPoller = sco.configurationPoller(config); if (configurationPoller != null) { subscribeConfigurationPoller(config, configurationUpdater); - try { /* Note: shutdown hooks are tricky because JVM holds reference for them forever preventing @@ -97,6 +92,10 @@ public static synchronized void run( } else { log.debug("No configuration poller available from SharedCommunicationObjects"); } + if (config.isDebuggerSymbolEnabled() && config.isDebuggerSymbolForceUpload()) { + instrumentation.addTransformer( + new SymbolExtractionTransformer(debuggerSink.getSymbolSink(), config)); + } } private static void setupSourceFileTracking( @@ -137,11 +136,11 @@ private static void subscribeConfigurationPoller( static ClassFileTransformer setupInstrumentTheWorldTransformer( Config config, Instrumentation instrumentation, - Sink sink, + DebuggerSink debuggerSink, StatsdMetricForwarder statsdMetricForwarder) { log.info("install Instrument-The-World transformer"); DebuggerTransformer transformer = - createTransformer(config, Configuration.builder().build(), null); + createTransformer(config, Configuration.builder().build(), null, debuggerSink); DebuggerContext.init(transformer::instrumentTheWorldResolver, statsdMetricForwarder); instrumentation.addTransformer(transformer); return transformer; @@ -158,8 +157,9 @@ public static Sink getSink() { private static DebuggerTransformer createTransformer( Config config, Configuration configuration, - DebuggerTransformer.InstrumentationListener listener) { - return new DebuggerTransformer(config, configuration, listener); + 
DebuggerTransformer.InstrumentationListener listener, + DebuggerSink debuggerSink) { + return new DebuggerTransformer(config, configuration, listener, debuggerSink); } static void stop() { diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTracer.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTracer.java index 74c623a5726..11a18fdcaff 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTracer.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTracer.java @@ -1,5 +1,7 @@ package com.datadog.debugger.agent; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.NOOP_TRACER; + import datadog.trace.bootstrap.debugger.DebuggerContext; import datadog.trace.bootstrap.debugger.DebuggerSpan; import datadog.trace.bootstrap.instrumentation.api.AgentScope; @@ -8,12 +10,12 @@ import datadog.trace.bootstrap.instrumentation.api.ScopeSource; public class DebuggerTracer implements DebuggerContext.Tracer { - private static final String OPERATION_NAME = "dd.dynamic.span"; + public static final String OPERATION_NAME = "dd.dynamic.span"; @Override public DebuggerSpan createSpan(String resourceName, String[] tags) { AgentTracer.TracerAPI tracerAPI = AgentTracer.get(); - if (tracerAPI == null) { + if (tracerAPI == null || tracerAPI == NOOP_TRACER) { return DebuggerSpan.NOOP_SPAN; } AgentSpan dynamicSpan = diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTransformer.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTransformer.java index 4419a498f5b..fdf5430a19d 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTransformer.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DebuggerTransformer.java @@ -7,12 +7,15 @@ import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.probe.LogProbe; import com.datadog.debugger.probe.ProbeDefinition; +import com.datadog.debugger.probe.SpanDecorationProbe; import com.datadog.debugger.probe.Where; +import com.datadog.debugger.sink.DebuggerSink; import com.datadog.debugger.util.ExceptionHelper; import datadog.trace.agent.tooling.AgentStrategies; import datadog.trace.api.Config; import datadog.trace.bootstrap.debugger.ProbeId; import datadog.trace.bootstrap.debugger.ProbeImplementation; +import datadog.trace.util.Strings; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; @@ -59,14 +62,16 @@ public class DebuggerTransformer implements ClassFileTransformer { private static final Logger log = LoggerFactory.getLogger(DebuggerTransformer.class); private static final String CANNOT_FIND_METHOD = "Cannot find method %s::%s"; + private static final String INSTRUMENTATION_FAILS = "Instrumentation fails for %s"; private static final String CANNOT_FIND_LINE = "No executable code was found at %s:L%s"; private static final Pattern COMMA_PATTERN = Pattern.compile(","); private final Config config; - private final TransformerDefinitionMatcher definitonMatcher; + private final TransformerDefinitionMatcher definitionMatcher; private final AllowListHelper allowListHelper; private final DenyListHelper denyListHelper; private final InstrumentationListener listener; + private final DebuggerSink debuggerSink; private final boolean instrumentTheWorld; private final Set excludeClasses = new HashSet<>(); private final Trie 
excludeTrie = new Trie(); @@ -77,12 +82,16 @@ public interface InstrumentationListener { } public DebuggerTransformer( - Config config, Configuration configuration, InstrumentationListener listener) { + Config config, + Configuration configuration, + InstrumentationListener listener, + DebuggerSink debuggerSink) { this.config = config; - this.definitonMatcher = new TransformerDefinitionMatcher(configuration); + this.definitionMatcher = new TransformerDefinitionMatcher(configuration); this.allowListHelper = new AllowListHelper(configuration.getAllowList()); this.denyListHelper = new DenyListHelper(configuration.getDenyList()); this.listener = listener; + this.debuggerSink = debuggerSink; this.instrumentTheWorld = config.isDebuggerInstrumentTheWorld(); if (this.instrumentTheWorld) { instrumentTheWorldProbes = new ConcurrentHashMap<>(); @@ -92,8 +101,9 @@ public DebuggerTransformer( } } - public DebuggerTransformer(Config config, Configuration configuration) { - this(config, configuration, null); + // Used only for tests + DebuggerTransformer(Config config, Configuration configuration) { + this(config, configuration, null, new DebuggerSink(config)); } private void readExcludeFiles(String commaSeparatedFileNames) { @@ -140,10 +150,11 @@ public byte[] transform( if (skipInstrumentation(loader, classFilePath)) { return null; } + List definitions = Collections.emptyList(); + String fullyQualifiedClassName = classFilePath.replace('/', '.'); try { - String fullyQualifiedClassName = classFilePath.replace('/', '.'); - List definitions = - definitonMatcher.match( + definitions = + definitionMatcher.match( classBeingRedefined, classFilePath, fullyQualifiedClassName, classfileBuffer); if (definitions.isEmpty()) { return null; @@ -157,7 +168,7 @@ public byte[] transform( boolean transformed = performInstrumentation(loader, fullyQualifiedClassName, defByLocation, classNode); if (transformed) { - return writeClassFile(loader, classFilePath, classNode); + return writeClassFile(definitions, loader, classFilePath, classNode); } // This is an info log because in case of SourceFile definition and multiple top-level // classes, type may match, but there is one classfile per top-level class so source file @@ -167,6 +178,7 @@ public byte[] transform( "type {} matched but no transformation for definitions: {}", classFilePath, definitions); } catch (Throwable ex) { log.warn("Cannot transform: ", ex); + reportInstrumentationFails(definitions, fullyQualifiedClassName); } return null; } @@ -182,7 +194,7 @@ private Map> mergeLocations(List d } private boolean skipInstrumentation(ClassLoader loader, String classFilePath) { - if (definitonMatcher.isEmpty()) { + if (definitionMatcher.isEmpty()) { log.warn("No debugger definitions present."); return true; } @@ -246,7 +258,7 @@ private byte[] transformTheWorld( Map> defByLocation = mergeLocations(probes); boolean transformed = performInstrumentation(loader, classFilePath, defByLocation, classNode); if (transformed) { - return writeClassFile(loader, classFilePath, classNode); + return writeClassFile(probes, loader, classFilePath, classNode); } } catch (Throwable ex) { log.warn("Cannot transform: ", ex); @@ -318,17 +330,24 @@ private ClassNode parseClassFile(String classFilePath, byte[] classfileBuffer) { return classNode; } - private byte[] writeClassFile(ClassLoader loader, String classFilePath, ClassNode classNode) { + private byte[] writeClassFile( + List definitions, + ClassLoader loader, + String classFilePath, + ClassNode classNode) { if (classNode.version < 
Opcodes.V1_8) { // Class file version must be at least 1.8 (52) classNode.version = Opcodes.V1_8; } ClassWriter writer = new SafeClassWriter(loader); - log.debug("Generating bytecode for class: {}", classFilePath.replace('/', '.')); + + log.debug("Generating bytecode for class: {}", Strings.getClassName(classFilePath)); try { classNode.accept(writer); } catch (Throwable t) { - log.error("Cannot write classfile for class: {}", classFilePath, t); + log.error("Cannot write classfile for class: {} Exception: ", classFilePath, t); + reportInstrumentationFails(definitions, Strings.getClassName(classFilePath)); + return null; } byte[] data = writer.toByteArray(); dumpInstrumentedClassFile(classFilePath, data); @@ -337,33 +356,34 @@ private byte[] writeClassFile(ClassLoader loader, String classFilePath, ClassNod } private void verifyByteCode(String classFilePath, byte[] classFile) { - if (config.isDebuggerVerifyByteCode()) { - StringWriter stringWriter = new StringWriter(); - PrintWriter printWriter = new PrintWriter(stringWriter); - ClassReader classReader = new ClassReader(classFile); - ClassNode classNode = new ClassNode(); - classReader.accept( - new CheckClassAdapter(Opcodes.ASM7, classNode, false) {}, ClassReader.SKIP_DEBUG); - List methods = classNode.methods; - for (MethodNode method : methods) { - BasicVerifier verifier = new BasicVerifier(); - Analyzer analyzer = new Analyzer<>(verifier); - try { - analyzer.analyze(classNode.name, method); - } catch (AnalyzerException e) { - printWriter.printf( - "Error analyzing method '%s.%s%s':%n", classNode.name, method.name, method.desc); - e.printStackTrace(printWriter); - } - } - printWriter.flush(); - String result = stringWriter.toString(); - if (!result.isEmpty()) { - log.warn("Verification of instrumented class {} failed", classFilePath); - log.debug("Verify result: {}", stringWriter); - throw new RuntimeException("Generated bydecode is invalid for " + classFilePath); + if (!config.isDebuggerVerifyByteCode()) { + return; + } + StringWriter stringWriter = new StringWriter(); + PrintWriter printWriter = new PrintWriter(stringWriter); + ClassReader classReader = new ClassReader(classFile); + ClassNode classNode = new ClassNode(); + classReader.accept( + new CheckClassAdapter(Opcodes.ASM7, classNode, false) {}, ClassReader.SKIP_DEBUG); + List methods = classNode.methods; + for (MethodNode method : methods) { + BasicVerifier verifier = new BasicVerifier(); + Analyzer analyzer = new Analyzer<>(verifier); + try { + analyzer.analyze(classNode.name, method); + } catch (AnalyzerException e) { + printWriter.printf( + "Error analyzing method '%s.%s%s':%n", classNode.name, method.name, method.desc); + e.printStackTrace(printWriter); } } + printWriter.flush(); + String result = stringWriter.toString(); + if (!result.isEmpty()) { + log.warn("Verification of instrumented class {} failed", classFilePath); + log.debug("Verify result: {}", stringWriter); + throw new RuntimeException("Generated bydecode is invalid for " + classFilePath); + } } private boolean performInstrumentation( @@ -403,43 +423,61 @@ private boolean performInstrumentation( InstrumentationResult result = applyInstrumentation(loader, classNode, definitions, methodNode); transformed |= result.isInstalled(); - for (ProbeDefinition definition : definitions) { - definition.buildLocation(result); - if (listener != null) { - listener.instrumentationResult(definition, result); - } - List diagnosticMessages = - result.getDiagnostics().get(definition.getProbeId()); - if 
(!result.getDiagnostics().isEmpty()) { - DebuggerAgent.getSink().addDiagnostics(definition.getProbeId(), diagnosticMessages); - } - } + handleInstrumentationResult(definitions, result); } } return transformed; } + private void handleInstrumentationResult( + List definitions, InstrumentationResult result) { + for (ProbeDefinition definition : definitions) { + definition.buildLocation(result); + if (listener != null) { + listener.instrumentationResult(definition, result); + } + List diagnosticMessages = + result.getDiagnostics().get(definition.getProbeId()); + if (!result.getDiagnostics().isEmpty()) { + addDiagnostics(definition, diagnosticMessages); + } + if (result.isInstalled()) { + debuggerSink.addInstalled(definition.getProbeId()); + } else if (result.isBlocked()) { + debuggerSink.addBlocked(definition.getProbeId()); + } + } + } + private void reportLocationNotFound( List definitions, String className, String methodName) { - String format; - String location; if (methodName != null) { - format = CANNOT_FIND_METHOD; - location = methodName; - } else { - // This is a line probe, so we don't report line not found because the line may be found later - // on a separate class files because probe was set on an inner/top-level class + reportErrorForAllProbes(definitions, CANNOT_FIND_METHOD, className, methodName); return; } + // This is a line probe, so we don't report line not found because the line may be found later + // on a separate class files because probe was set on an inner/top-level class + } + + private void reportInstrumentationFails(List definitions, String className) { + reportErrorForAllProbes(definitions, INSTRUMENTATION_FAILS, className, null); + } + + private void reportErrorForAllProbes( + List definitions, String format, String className, String location) { String msg = String.format(format, className, location); DiagnosticMessage diagnosticMessage = new DiagnosticMessage(DiagnosticMessage.Kind.ERROR, msg); for (ProbeDefinition definition : definitions) { - DebuggerAgent.getSink() - .addDiagnostics(definition.getProbeId(), singletonList(diagnosticMessage)); - log.debug("{} for definition: {}", msg, definition); + addDiagnostics(definition, singletonList(diagnosticMessage)); } } + private void addDiagnostics( + ProbeDefinition definition, List diagnosticMessages) { + debuggerSink.addDiagnostics(definition.getProbeId(), diagnosticMessages); + log.debug("Diagnostic messages for definition[{}]: {}", definition, diagnosticMessages); + } + private void notifyBlockedDefinitions( List definitions, InstrumentationResult result) { if (listener != null) { @@ -461,21 +499,26 @@ private InstrumentationResult applyInstrumentation( preCheckInstrumentation(diagnostics, classLoader, methodNode); if (status != InstrumentationResult.Status.ERROR) { try { - List logProbes = new ArrayList<>(); + List capturedContextProbes = new ArrayList<>(); for (ProbeDefinition definition : definitions) { - if (definition instanceof LogProbe) { - logProbes.add(definition); + // Log and span decoration probe shared the same instrumentor: CaptureContextInstrumentor + // and therefore need to be instrumented once + if (definition instanceof LogProbe || definition instanceof SpanDecorationProbe) { + capturedContextProbes.add(definition); } else { List probeDiagnostics = diagnostics.get(definition.getProbeId()); - definition.instrument(classLoader, classNode, methodNode, probeDiagnostics); + status = definition.instrument(classLoader, classNode, methodNode, probeDiagnostics); } } - if (logProbes.size() > 0) { - List 
probesIds = logProbes.stream().map(ProbeDefinition::getId).collect(toList()); - List probeDiagnostics = diagnostics.get(logProbes.get(0).getProbeId()); - logProbes - .get(0) - .instrument(classLoader, classNode, methodNode, probeDiagnostics, probesIds); + if (capturedContextProbes.size() > 0) { + List probesIds = + capturedContextProbes.stream().map(ProbeDefinition::getId).collect(toList()); + ProbeDefinition referenceDefinition = selectReferenceDefinition(capturedContextProbes); + List probeDiagnostics = + diagnostics.get(referenceDefinition.getProbeId()); + status = + referenceDefinition.instrument( + classLoader, classNode, methodNode, probeDiagnostics, probesIds); } } catch (Throwable t) { log.warn("Exception during instrumentation: ", t); @@ -487,6 +530,20 @@ private InstrumentationResult applyInstrumentation( return new InstrumentationResult(status, diagnostics, classNode, methodNode); } + // Log & Span Decoration probes share the same instrumentor so only one definition should be + // selected to + // generate the instrumentation. Log probes needs capture limits provided by the configuration + // so if the list of definition contains at least 1 log probe this is the log probe that need to + // be picked. + // TODO: handle the conflicting limits for log probes + mixing CaptureSnapshot or not + private ProbeDefinition selectReferenceDefinition(List capturedContextProbes) { + ProbeDefinition first = capturedContextProbes.get(0); + return capturedContextProbes.stream() + .filter(it -> it instanceof LogProbe) + .findFirst() + .orElse(first); + } + private InstrumentationResult.Status preCheckInstrumentation( Map> diagnostics, ClassLoader classLoader, diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DenyListHelper.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DenyListHelper.java index daa443cbb78..030e1eae124 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DenyListHelper.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/DenyListHelper.java @@ -1,6 +1,7 @@ package com.datadog.debugger.agent; import datadog.trace.bootstrap.debugger.DebuggerContext; +import datadog.trace.bootstrap.debugger.util.Redaction; import datadog.trace.util.ClassNameTrie; import java.util.ArrayList; import java.util.Arrays; @@ -21,6 +22,8 @@ public class DenyListHelper implements DebuggerContext.ClassFilter { public DenyListHelper(Configuration.FilterList denyList) { Collection packages = new ArrayList<>(DENIED_PACKAGES); Collection classes = new ArrayList<>(DENIED_CLASSES); + packages.addAll(Redaction.getRedactedPackages()); + classes.addAll(Redaction.getRedactedClasses()); if (denyList != null) { packages.addAll(denyList.getPackagePrefixes()); classes.addAll(denyList.getClasses()); diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/JsonSnapshotSerializer.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/JsonSnapshotSerializer.java index 5708b3ceb75..f2627d581eb 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/JsonSnapshotSerializer.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/JsonSnapshotSerializer.java @@ -45,14 +45,14 @@ private void handleDuration(Snapshot snapshot, IntakeRequest request) { } private void handleCorrelationFields(Snapshot snapshot, IntakeRequest request) { + request.traceId = snapshot.getTraceId(); + request.spanId = snapshot.getSpanId(); 
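Note on the selectReferenceDefinition change in DebuggerTransformer above: since log and span-decoration probes now share CapturedContextInstrumentor, only one definition drives the instrumentation for the whole group, and a LogProbe is preferred because it carries the capture limits from configuration. A minimal, standalone sketch of that selection follows; LogProbeStub, SpanDecorationProbeStub and ReferenceSelection are simplified stand-ins for illustration only, not the real probe API.

import java.util.Arrays;
import java.util.List;

// Simplified stand-ins for the probe model; the real classes live in com.datadog.debugger.probe.
interface Probe {}
class LogProbeStub implements Probe {}
class SpanDecorationProbeStub implements Probe {}

class ReferenceSelection {
  // Mirrors selectReferenceDefinition: prefer a log probe (it carries capture limits),
  // otherwise fall back to the first probe sharing the instrumentor.
  static Probe selectReference(List<Probe> sharedProbes) {
    Probe first = sharedProbes.get(0);
    return sharedProbes.stream()
        .filter(p -> p instanceof LogProbeStub)
        .findFirst()
        .orElse(first);
  }

  public static void main(String[] args) {
    List<Probe> probes = Arrays.asList(new SpanDecorationProbeStub(), new LogProbeStub());
    System.out.println(selectReference(probes) instanceof LogProbeStub); // true
  }
}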
CapturedContext entry = snapshot.getCaptures().getEntry(); if (entry != null) { - addTraceSpanId(entry, request); removeTraceSpanId(entry); } if (snapshot.getCaptures().getLines() != null) { for (CapturedContext context : snapshot.getCaptures().getLines().values()) { - addTraceSpanId(context, request); removeTraceSpanId(context); } } @@ -71,11 +71,6 @@ private void removeTraceSpanId(CapturedContext context) { fields.remove(DD_SPAN_ID); } - private void addTraceSpanId(CapturedContext context, IntakeRequest request) { - request.traceId = context.getTraceId(); - request.spanId = context.getSpanId(); - } - public static class IntakeRequest { private final String service; private final DebuggerIntakeRequestData debugger; diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/LogMessageTemplateBuilder.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/LogMessageTemplateBuilder.java index 4554630e8eb..6b314725870 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/LogMessageTemplateBuilder.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/agent/LogMessageTemplateBuilder.java @@ -3,11 +3,13 @@ import static com.datadog.debugger.util.ValueScriptHelper.serializeValue; import com.datadog.debugger.el.EvaluationException; +import com.datadog.debugger.el.RedactedException; import com.datadog.debugger.el.Value; import com.datadog.debugger.el.ValueScript; import com.datadog.debugger.probe.LogProbe; import datadog.trace.bootstrap.debugger.CapturedContext; import datadog.trace.bootstrap.debugger.EvaluationError; +import datadog.trace.bootstrap.debugger.util.Redaction; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,7 +51,9 @@ public String evaluate(CapturedContext context, LogProbe.LogStatus status) { LOGGER.debug("Evaluation error: ", ex); status.addError(new EvaluationError(ex.getExpr(), ex.getMessage())); status.setLogTemplateErrors(true); - sb.append("{").append(ex.getMessage()).append("}"); + String msg = + ex instanceof RedactedException ? 
Redaction.REDACTED_VALUE : ex.getMessage(); + sb.append("{").append(msg).append("}"); } } } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/ASMHelper.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/ASMHelper.java index 3edb9a464f0..eb32212e0fc 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/ASMHelper.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/ASMHelper.java @@ -9,6 +9,7 @@ import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import org.objectweb.asm.Opcodes; @@ -20,6 +21,7 @@ import org.objectweb.asm.tree.InsnList; import org.objectweb.asm.tree.InsnNode; import org.objectweb.asm.tree.LdcInsnNode; +import org.objectweb.asm.tree.LocalVariableNode; import org.objectweb.asm.tree.MethodInsnNode; import org.objectweb.asm.tree.TypeInsnNode; @@ -203,6 +205,43 @@ private static String getReflectiveMethodName(int sort) { } } + public static List sortLocalVariables(List localVariables) { + List sortedLocalVars = new ArrayList<>(localVariables); + sortedLocalVars.sort(Comparator.comparingInt(o -> o.index)); + return sortedLocalVars; + } + + public static LocalVariableNode[] createLocalVarNodes(List sortedLocalVars) { + int maxIndex = sortedLocalVars.get(sortedLocalVars.size() - 1).index; + LocalVariableNode[] localVars = new LocalVariableNode[maxIndex + 1]; + for (LocalVariableNode localVariableNode : sortedLocalVars) { + localVars[localVariableNode.index] = localVariableNode; + } + return localVars; + } + + public static void adjustLocalVarsBasedOnArgs( + boolean isStatic, + LocalVariableNode[] localVars, + org.objectweb.asm.Type[] argTypes, + List sortedLocalVars) { + // assume that first local variables matches method arguments + // as stated into the JVM spec: + // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-2.html#jvms-2.6.1 + // so we reassigned local var in arg slots if they are empty + if (argTypes.length < localVars.length) { + int slot = isStatic ? 
0 : 1; + int localVarTableIdx = slot; + for (org.objectweb.asm.Type t : argTypes) { + if (localVars[slot] == null && localVarTableIdx < sortedLocalVars.size()) { + localVars[slot] = sortedLocalVars.get(localVarTableIdx); + } + slot += t.getSize(); + localVarTableIdx++; + } + } + } + /** Wraps ASM's {@link org.objectweb.asm.Type} with associated generic types */ public static class Type { private final org.objectweb.asm.Type mainType; diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/CapturedContextInstrumentor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/CapturedContextInstrumentor.java index 6b810377e25..f728e14eb30 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/CapturedContextInstrumentor.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/CapturedContextInstrumentor.java @@ -78,17 +78,21 @@ public CapturedContextInstrumentor( } @Override - public void instrument() { + public InstrumentationResult.Status instrument() { if (isLineProbe) { fillLineMap(); - addLineCaptures(lineMap); - } else { - instrumentMethodEnter(); - instrumentTryCatchHandlers(); - processInstructions(); - addFinallyHandler(returnHandlerLabel); + if (!addLineCaptures(lineMap)) { + return InstrumentationResult.Status.ERROR; + } + installFinallyBlocks(); + return InstrumentationResult.Status.INSTALLED; } + instrumentMethodEnter(); + instrumentTryCatchHandlers(); + processInstructions(); + addFinallyHandler(returnHandlerLabel); installFinallyBlocks(); + return InstrumentationResult.Status.INSTALLED; } private void installFinallyBlocks() { @@ -99,15 +103,15 @@ private void installFinallyBlocks() { } } - private void addLineCaptures(LineMap lineMap) { + private boolean addLineCaptures(LineMap lineMap) { Where.SourceLine[] targetLines = definition.getWhere().getSourceLines(); if (targetLines == null) { - // no line capture to perform - return; + reportError("Missing line(s) in probe definition."); + return false; } if (lineMap.isEmpty()) { reportError("Missing line debug information."); - return; + return false; } for (Where.SourceLine sourceLine : targetLines) { int from = sourceLine.getFrom(); @@ -123,6 +127,8 @@ private void addLineCaptures(LineMap lineMap) { } if (beforeLabel != null) { InsnList insnList = new InsnList(); + ldc(insnList, Type.getObjectType(classNode.name)); + // stack [class, array] pushProbesIds(insnList); // stack [array] invokeStatic( @@ -130,6 +136,7 @@ private void addLineCaptures(LineMap lineMap) { DEBUGGER_CONTEXT_TYPE, "isReadyToCapture", Type.BOOLEAN_TYPE, + CLASS_TYPE, STRING_ARRAY_TYPE); // stack [boolean] LabelNode targetNode = new LabelNode(); @@ -167,6 +174,7 @@ private void addLineCaptures(LineMap lineMap) { methodNode.instructions.insert(afterLabel, insnList); } } + return true; } @Override @@ -309,10 +317,17 @@ private void instrumentMethodEnter() { methodNode.instructions.insert(methodEnterLabel, insnList); return; } + ldc(insnList, Type.getObjectType(classNode.name)); + // stack [class] pushProbesIds(insnList); - // stack [array] + // stack [class, array] invokeStatic( - insnList, DEBUGGER_CONTEXT_TYPE, "isReadyToCapture", Type.BOOLEAN_TYPE, STRING_ARRAY_TYPE); + insnList, + DEBUGGER_CONTEXT_TYPE, + "isReadyToCapture", + Type.BOOLEAN_TYPE, + CLASS_TYPE, + STRING_ARRAY_TYPE); // stack [boolean] LabelNode targetNode = new LabelNode(); LabelNode gotoNode = new LabelNode(); diff --git 
a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/InstrumentationResult.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/InstrumentationResult.java index a752a11d75b..134a542abd5 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/InstrumentationResult.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/InstrumentationResult.java @@ -56,6 +56,10 @@ public InstrumentationResult( this.methodName = methodName; } + public boolean isError() { + return status == Status.ERROR; + } + public boolean isBlocked() { return status == Status.BLOCKED; } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/Instrumentor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/Instrumentor.java index cdd6a02ec18..cda1012b299 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/Instrumentor.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/Instrumentor.java @@ -1,14 +1,15 @@ package com.datadog.debugger.instrumentation; +import static com.datadog.debugger.instrumentation.ASMHelper.adjustLocalVarsBasedOnArgs; +import static com.datadog.debugger.instrumentation.ASMHelper.createLocalVarNodes; import static com.datadog.debugger.instrumentation.ASMHelper.ldc; +import static com.datadog.debugger.instrumentation.ASMHelper.sortLocalVariables; import static com.datadog.debugger.instrumentation.Types.STRING_TYPE; import com.datadog.debugger.instrumentation.DiagnosticMessage.Kind; import com.datadog.debugger.probe.ProbeDefinition; import com.datadog.debugger.probe.Where; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Comparator; import java.util.List; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; @@ -69,35 +70,16 @@ public Instrumentor( localVarsBySlot = extractLocalVariables(argTypes); } - public abstract void instrument(); + public abstract InstrumentationResult.Status instrument(); private LocalVariableNode[] extractLocalVariables(Type[] argTypes) { if (methodNode.localVariables == null || methodNode.localVariables.isEmpty()) { return new LocalVariableNode[0]; } - List sortedLocalVars = new ArrayList<>(methodNode.localVariables); - sortedLocalVars.sort(Comparator.comparingInt(o -> o.index)); - int maxIndex = sortedLocalVars.get(sortedLocalVars.size() - 1).index; - LocalVariableNode[] localVars = new LocalVariableNode[maxIndex + 1]; + List sortedLocalVars = sortLocalVariables(methodNode.localVariables); + LocalVariableNode[] localVars = createLocalVarNodes(sortedLocalVars); + adjustLocalVarsBasedOnArgs(isStatic, localVars, argTypes, sortedLocalVars); localVarBaseOffset = sortedLocalVars.get(0).index; - for (LocalVariableNode localVariableNode : sortedLocalVars) { - localVars[localVariableNode.index] = localVariableNode; - } - // assume that first local variables matches method arguments - // as stated into the JVM spec: - // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-2.html#jvms-2.6.1 - // so we reassigned local var in arg slots if they are empty - if (argTypes.length < localVars.length) { - int slot = isStatic ? 
0 : 1; - int localVarTableIdx = slot; - for (Type t : argTypes) { - if (localVars[slot] == null && localVarTableIdx < sortedLocalVars.size()) { - localVars[slot] = sortedLocalVars.get(localVarTableIdx); - } - slot += t.getSize(); - localVarTableIdx++; - } - } return localVars; } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/LogInstrumentor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/LogInstrumentor.java deleted file mode 100644 index 7b234703b3b..00000000000 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/LogInstrumentor.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.datadog.debugger.instrumentation; - -import static com.datadog.debugger.probe.LogProbe.Capture.toLimits; - -import com.datadog.debugger.probe.LogProbe; -import java.util.List; -import org.objectweb.asm.tree.ClassNode; -import org.objectweb.asm.tree.MethodNode; - -/** Handles generating instrumentation for snapshot/log method & line probes */ -public final class LogInstrumentor extends CapturedContextInstrumentor { - private final LogProbe.Capture capture; - - public LogInstrumentor( - LogProbe logProbe, - ClassLoader classLoader, - ClassNode classNode, - MethodNode methodNode, - List diagnostics, - List probeIds) { - super( - logProbe, - classLoader, - classNode, - methodNode, - diagnostics, - probeIds, - logProbe.isCaptureSnapshot(), - toLimits(logProbe.getCapture())); - this.capture = logProbe.getCapture(); - } - - @Override - public void instrument() { - super.instrument(); - } -} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/MetricInstrumentor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/MetricInstrumentor.java index ae944a4bc6f..38642365cc4 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/MetricInstrumentor.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/MetricInstrumentor.java @@ -89,29 +89,29 @@ public MetricInstrumentor( } @Override - public void instrument() { + public InstrumentationResult.Status instrument() { if (isLineProbe) { fillLineMap(); - addLineMetric(lineMap); - } else { - switch (definition.getEvaluateAt()) { - case ENTRY: - case DEFAULT: - { - InsnList insnList = wrapTryCatch(callMetric(metricProbe)); - methodNode.instructions.insert(methodEnterLabel, insnList); - break; - } - case EXIT: - { - processInstructions(); - break; - } - default: - throw new IllegalArgumentException( - "Invalid evaluateAt attribute: " + definition.getEvaluateAt()); - } + return addLineMetric(lineMap); + } + switch (definition.getEvaluateAt()) { + case ENTRY: + case DEFAULT: + { + InsnList insnList = wrapTryCatch(callMetric(metricProbe)); + methodNode.instructions.insert(methodEnterLabel, insnList); + break; + } + case EXIT: + { + processInstructions(); + break; + } + default: + throw new IllegalArgumentException( + "Invalid evaluateAt attribute: " + definition.getEvaluateAt()); } + return InstrumentationResult.Status.INSTALLED; } private InsnList wrapTryCatch(InsnList insnList) { @@ -287,15 +287,15 @@ private InsnList callMetric(MetricProbe metricProbe) { return null; } - private void addLineMetric(LineMap lineMap) { + private InstrumentationResult.Status addLineMetric(LineMap lineMap) { Where.SourceLine[] targetLines = metricProbe.getWhere().getSourceLines(); if (targetLines == null) { - // no line capture to perform - return; 
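Aside on the local-variable slot arithmetic that ASMHelper.adjustLocalVarsBasedOnArgs (and the Instrumentor code removed above) relies on: per JVMS 2.6.1 the first local-variable slots hold `this` (for instance methods) followed by the method arguments, with long and double taking two slots each. A small standalone sketch, assuming only ASM's org.objectweb.asm.Type on the classpath:

import org.objectweb.asm.Type;

// Shows the slot layout for an instance method "void m(int, long, String)":
// slot 0 is `this`, then each argument occupies one slot (two for long/double).
public class SlotLayoutDemo {
  public static void main(String[] args) {
    Type[] argTypes = Type.getArgumentTypes("(IJLjava/lang/String;)V");
    boolean isStatic = false;
    int slot = isStatic ? 0 : 1; // slot 0 is reserved for `this` on instance methods
    for (Type t : argTypes) {
      System.out.println("arg " + t.getClassName() + " -> slot " + slot);
      slot += t.getSize(); // long/double advance by two slots
    }
    // Prints: int -> slot 1, long -> slot 2, java.lang.String -> slot 4
  }
}

With that descriptor the arguments land in slots 1, 2 and 4, which is why the loops above advance by t.getSize() rather than by one.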
+ reportError("Missing line(s) in probe definition."); + return InstrumentationResult.Status.ERROR; } if (lineMap.isEmpty()) { reportError("Missing line debug information."); - return; + return InstrumentationResult.Status.ERROR; } for (Where.SourceLine sourceLine : targetLines) { int from = sourceLine.getFrom(); @@ -316,6 +316,7 @@ private void addLineMetric(LineMap lineMap) { methodNode.instructions.insert(afterLabel, insnList); } } + return InstrumentationResult.Status.INSTALLED; } private static class VisitorResult { diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanDecorationInstrumentor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanDecorationInstrumentor.java deleted file mode 100644 index 2c1722a9722..00000000000 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanDecorationInstrumentor.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.datadog.debugger.instrumentation; - -import com.datadog.debugger.probe.SpanDecorationProbe; -import datadog.trace.bootstrap.debugger.Limits; -import java.util.List; -import org.objectweb.asm.tree.ClassNode; -import org.objectweb.asm.tree.MethodNode; - -public class SpanDecorationInstrumentor extends CapturedContextInstrumentor { - public SpanDecorationInstrumentor( - SpanDecorationProbe probe, - ClassLoader classLoader, - ClassNode classNode, - MethodNode methodNode, - List diagnostics, - List probeIds) { - super(probe, classLoader, classNode, methodNode, diagnostics, probeIds, false, Limits.DEFAULT); - } - - @Override - public void instrument() { - super.instrument(); - } -} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanInstrumentor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanInstrumentor.java index b1083fbd0f1..773ea79cf4e 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanInstrumentor.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/instrumentation/SpanInstrumentor.java @@ -36,25 +36,24 @@ public SpanInstrumentor( } @Override - public void instrument() { + public InstrumentationResult.Status instrument() { if (isLineProbe) { fillLineMap(); - addRangeSpan(lineMap); - } else { - spanVar = newVar(DEBUGGER_SPAN_TYPE); - processInstructions(); - LabelNode initSpanLabel = new LabelNode(); - InsnList insnList = createSpan(initSpanLabel); - LabelNode endLabel = new LabelNode(); - methodNode.instructions.insert(methodNode.instructions.getLast(), endLabel); - - LabelNode handlerLabel = new LabelNode(); - InsnList handler = createCatchHandler(handlerLabel); - methodNode.instructions.add(handler); - methodNode.tryCatchBlocks.add( - new TryCatchBlockNode(initSpanLabel, endLabel, handlerLabel, null)); - methodNode.instructions.insert(methodEnterLabel, insnList); + return addRangeSpan(lineMap); } + spanVar = newVar(DEBUGGER_SPAN_TYPE); + processInstructions(); + LabelNode initSpanLabel = new LabelNode(); + InsnList insnList = createSpan(initSpanLabel); + LabelNode endLabel = new LabelNode(); + methodNode.instructions.insert(methodNode.instructions.getLast(), endLabel); + LabelNode handlerLabel = new LabelNode(); + InsnList handler = createCatchHandler(handlerLabel); + methodNode.instructions.add(handler); + methodNode.tryCatchBlocks.add( + new TryCatchBlockNode(initSpanLabel, endLabel, handlerLabel, null)); + methodNode.instructions.insert(methodEnterLabel, insnList); + 
return InstrumentationResult.Status.INSTALLED; } private InsnList createCatchHandler(LabelNode handlerLabel) { @@ -94,25 +93,29 @@ private InsnList createSpan(LabelNode initSpanLabel) { return insnList; } - private void addRangeSpan(LineMap lineMap) { + private InstrumentationResult.Status addRangeSpan(LineMap lineMap) { Where.SourceLine[] targetLines = definition.getWhere().getSourceLines(); if (targetLines == null || targetLines.length == 0) { - // no line capture to perform - return; + reportError("Missing line(s) in probe definition."); + return InstrumentationResult.Status.ERROR; } if (lineMap.isEmpty()) { reportError("Missing line debug information."); - return; + return InstrumentationResult.Status.ERROR; } for (Where.SourceLine sourceLine : targetLines) { int from = sourceLine.getFrom(); int till = sourceLine.getTill(); + if (from == till) { + reportError("Single line span is not supported, you need to provide a range."); + return InstrumentationResult.Status.ERROR; + } LabelNode beforeLabel = lineMap.getLineLabel(from); LabelNode afterLabel = lineMap.getLineLabel(till); if (beforeLabel == null || afterLabel == null) { reportError( "No line info for " + (sourceLine.isSingleLine() ? "line " : "range ") + sourceLine); - return; + return InstrumentationResult.Status.ERROR; } spanVar = newVar(DEBUGGER_SPAN_TYPE); LabelNode initSpanLabel = new LabelNode(); @@ -127,6 +130,7 @@ private void addRangeSpan(LineMap lineMap) { debuggerSpanFinish(finishSpanInsnList); methodNode.instructions.insert(afterLabel, finishSpanInsnList); } + return InstrumentationResult.Status.INSTALLED; } @Override diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/LogProbe.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/LogProbe.java index 1ee64681b59..acb71fa7f50 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/LogProbe.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/LogProbe.java @@ -1,13 +1,16 @@ package com.datadog.debugger.probe; +import static com.datadog.debugger.probe.LogProbe.Capture.toLimits; + import com.datadog.debugger.agent.DebuggerAgent; import com.datadog.debugger.agent.Generated; import com.datadog.debugger.agent.LogMessageTemplateBuilder; import com.datadog.debugger.el.EvaluationException; import com.datadog.debugger.el.ProbeCondition; import com.datadog.debugger.el.ValueScript; +import com.datadog.debugger.instrumentation.CapturedContextInstrumentor; import com.datadog.debugger.instrumentation.DiagnosticMessage; -import com.datadog.debugger.instrumentation.LogInstrumentor; +import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.sink.Sink; import com.datadog.debugger.sink.Snapshot; import com.squareup.moshi.Json; @@ -341,22 +344,36 @@ public Sampling getSampling() { } @Override - public void instrument( + public InstrumentationResult.Status instrument( ClassLoader classLoader, ClassNode classNode, MethodNode methodNode, List diagnostics, List probeIds) { - new LogInstrumentor(this, classLoader, classNode, methodNode, diagnostics, probeIds) + return new CapturedContextInstrumentor( + this, + classLoader, + classNode, + methodNode, + diagnostics, + probeIds, + isCaptureSnapshot(), + toLimits(getCapture())) .instrument(); } @Override - public void evaluate(CapturedContext context, CapturedContext.Status status) { + public void evaluate( + CapturedContext context, CapturedContext.Status status, MethodLocation methodLocation) { if 
(!(status instanceof LogStatus)) { throw new IllegalStateException("Invalid status: " + status.getClass()); } + LogStatus logStatus = (LogStatus) status; + if (!hasCondition()) { + // sample when no condition associated + sample(logStatus, methodLocation); + } logStatus.setCondition(evaluateCondition(context, logStatus)); CapturedContext.CapturedThrowable throwable = context.getThrowable(); if (logStatus.hasConditionErrors() && throwable != null) { @@ -364,12 +381,29 @@ public void evaluate(CapturedContext context, CapturedContext.Status status) { new EvaluationError( "uncaught exception", throwable.getType() + ": " + throwable.getMessage())); } - if (logStatus.getCondition()) { + if (hasCondition() && logStatus.getCondition()) { + // sample if probe has condition and condition is true + sample(logStatus, methodLocation); + } + if (logStatus.isSampled() && logStatus.getCondition()) { LogMessageTemplateBuilder logMessageBuilder = new LogMessageTemplateBuilder(segments); logStatus.setMessage(logMessageBuilder.evaluate(context, logStatus)); } } + private void sample(LogStatus logStatus, MethodLocation methodLocation) { + // sample only once and when we need to evaluate + if (!MethodLocation.isSame(methodLocation, evaluateAt)) { + return; + } + boolean sampled = ProbeRateLimiter.tryProbe(id); + logStatus.setSampled(sampled); + if (!sampled) { + LOGGER.debug("{} not sampled!", id); + DebuggerAgent.getSink().skipSnapshot(id, DebuggerContext.SkipCause.RATE); + } + } + private boolean evaluateCondition(CapturedContext capture, LogStatus status) { if (probeCondition == null) { return true; @@ -399,13 +433,19 @@ public void commit( LogStatus entryStatus = convertStatus(entryContext.getStatus(id)); LogStatus exitStatus = convertStatus(exitContext.getStatus(id)); String message = null; + String traceId = null; + String spanId = null; switch (evaluateAt) { case ENTRY: case DEFAULT: message = entryStatus.getMessage(); + traceId = entryContext.getTraceId(); + spanId = entryContext.getSpanId(); break; case EXIT: message = exitStatus.getMessage(); + traceId = exitContext.getTraceId(); + spanId = exitContext.getSpanId(); break; } Sink sink = DebuggerAgent.getSink(); @@ -413,13 +453,8 @@ public void commit( int maxDepth = capture != null ? 
capture.maxReferenceDepth : -1; Snapshot snapshot = new Snapshot(Thread.currentThread(), this, maxDepth); if (entryStatus.shouldSend() && exitStatus.shouldSend()) { - // only rate limit if a condition is defined - if (probeCondition != null) { - if (!ProbeRateLimiter.tryProbe(id)) { - sink.skipSnapshot(id, DebuggerContext.SkipCause.RATE); - return; - } - } + snapshot.setTraceId(traceId); + snapshot.setSpanId(spanId); if (isCaptureSnapshot()) { snapshot.setEntry(entryContext); snapshot.setExit(exitContext); @@ -487,13 +522,8 @@ public void commit(CapturedContext lineContext, int line) { Snapshot snapshot = new Snapshot(Thread.currentThread(), this, maxDepth); boolean shouldCommit = false; if (status.shouldSend()) { - // only rate limit if a condition is defined - if (probeCondition != null) { - if (!ProbeRateLimiter.tryProbe(id)) { - sink.skipSnapshot(id, DebuggerContext.SkipCause.RATE); - return; - } - } + snapshot.setTraceId(lineContext.getTraceId()); + snapshot.setSpanId(lineContext.getSpanId()); if (isCaptureSnapshot()) { snapshot.addLine(lineContext, line); } @@ -533,6 +563,7 @@ public static class LogStatus extends CapturedContext.Status { private boolean condition = true; private boolean hasLogTemplateErrors; private boolean hasConditionErrors; + private boolean sampled = true; private String message; public LogStatus(ProbeImplementation probeImplementation) { @@ -546,7 +577,7 @@ private LogStatus(ProbeImplementation probeImplementation, boolean condition) { @Override public boolean shouldFreezeContext() { - return probeImplementation.isCaptureSnapshot() && shouldSend(); + return sampled && probeImplementation.isCaptureSnapshot() && shouldSend(); } @Override @@ -555,7 +586,7 @@ public boolean isCapturing() { } public boolean shouldSend() { - return condition && !hasConditionErrors; + return sampled && condition && !hasConditionErrors; } public boolean shouldReportError() { @@ -593,6 +624,14 @@ public void setMessage(String message) { public String getMessage() { return message; } + + public void setSampled(boolean sampled) { + this.sampled = sampled; + } + + public boolean isSampled() { + return sampled; + } } @Generated diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/MetricProbe.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/MetricProbe.java index af481422d97..312da6dea68 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/MetricProbe.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/MetricProbe.java @@ -3,6 +3,7 @@ import com.datadog.debugger.agent.Generated; import com.datadog.debugger.el.ValueScript; import com.datadog.debugger.instrumentation.DiagnosticMessage; +import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.instrumentation.MetricInstrumentor; import datadog.trace.bootstrap.debugger.MethodLocation; import datadog.trace.bootstrap.debugger.ProbeId; @@ -135,13 +136,13 @@ public ValueScript getValue() { } @Override - public void instrument( + public InstrumentationResult.Status instrument( ClassLoader classLoader, ClassNode classNode, MethodNode methodNode, List diagnostics, List probeIds) { - new MetricInstrumentor(this, classLoader, classNode, methodNode, diagnostics, probeIds) + return new MetricInstrumentor(this, classLoader, classNode, methodNode, diagnostics, probeIds) .instrument(); } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/ProbeDefinition.java 
b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/ProbeDefinition.java index b7cc85da7f9..4de096f0619 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/ProbeDefinition.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/ProbeDefinition.java @@ -122,15 +122,15 @@ private static void initTagMap(Map tagMap, Tag[] tags) { } } - public void instrument( + public InstrumentationResult.Status instrument( ClassLoader classLoader, ClassNode classNode, MethodNode methodNode, List diagnostics) { - instrument(classLoader, classNode, methodNode, diagnostics, singletonList(getId())); + return instrument(classLoader, classNode, methodNode, diagnostics, singletonList(getId())); } - public abstract void instrument( + public abstract InstrumentationResult.Status instrument( ClassLoader classLoader, ClassNode classNode, MethodNode methodNode, @@ -143,7 +143,8 @@ public ProbeLocation getLocation() { } @Override - public void evaluate(CapturedContext context, CapturedContext.Status status) {} + public void evaluate( + CapturedContext context, CapturedContext.Status status, MethodLocation methodLocation) {} @Override public void commit( diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanDecorationProbe.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanDecorationProbe.java index 4313a6b7c4d..5de07fc1a7b 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanDecorationProbe.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanDecorationProbe.java @@ -5,12 +5,14 @@ import com.datadog.debugger.agent.LogMessageTemplateBuilder; import com.datadog.debugger.el.EvaluationException; import com.datadog.debugger.el.ProbeCondition; +import com.datadog.debugger.instrumentation.CapturedContextInstrumentor; import com.datadog.debugger.instrumentation.DiagnosticMessage; -import com.datadog.debugger.instrumentation.SpanDecorationInstrumentor; +import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.sink.Snapshot; import datadog.trace.api.Pair; import datadog.trace.bootstrap.debugger.CapturedContext; import datadog.trace.bootstrap.debugger.EvaluationError; +import datadog.trace.bootstrap.debugger.Limits; import datadog.trace.bootstrap.debugger.MethodLocation; import datadog.trace.bootstrap.debugger.ProbeId; import datadog.trace.bootstrap.debugger.ProbeImplementation; @@ -131,18 +133,20 @@ public SpanDecorationProbe( } @Override - public void instrument( + public InstrumentationResult.Status instrument( ClassLoader classLoader, ClassNode classNode, MethodNode methodNode, List diagnostics, List probeIds) { - new SpanDecorationInstrumentor(this, classLoader, classNode, methodNode, diagnostics, probeIds) + return new CapturedContextInstrumentor( + this, classLoader, classNode, methodNode, diagnostics, probeIds, false, Limits.DEFAULT) .instrument(); } @Override - public void evaluate(CapturedContext context, CapturedContext.Status status) { + public void evaluate( + CapturedContext context, CapturedContext.Status status, MethodLocation methodLocation) { for (Decoration decoration : decorations) { if (decoration.when != null) { try { diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanProbe.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanProbe.java index 719a8997bd9..b577bef8cbb 100644 --- 
a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanProbe.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/probe/SpanProbe.java @@ -2,6 +2,7 @@ import com.datadog.debugger.agent.Generated; import com.datadog.debugger.instrumentation.DiagnosticMessage; +import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.instrumentation.SpanInstrumentor; import datadog.trace.bootstrap.debugger.MethodLocation; import datadog.trace.bootstrap.debugger.ProbeId; @@ -23,13 +24,13 @@ public SpanProbe(String language, ProbeId probeId, String[] tagStrs, Where where } @Override - public void instrument( + public InstrumentationResult.Status instrument( ClassLoader classLoader, ClassNode classNode, MethodNode methodNode, List diagnostics, List probeIds) { - new SpanInstrumentor(this, classLoader, classNode, methodNode, diagnostics, probeIds) + return new SpanInstrumentor(this, classLoader, classNode, methodNode, diagnostics, probeIds) .instrument(); } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/DebuggerSink.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/DebuggerSink.java index bbe22230c77..6167e2de60d 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/DebuggerSink.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/DebuggerSink.java @@ -30,6 +30,7 @@ public class DebuggerSink implements Sink { private final ProbeStatusSink probeStatusSink; private final SnapshotSink snapshotSink; + private final SymbolSink symbolSink; private final DebuggerMetrics debuggerMetrics; private final BatchUploader batchUploader; private final String tags; @@ -42,10 +43,11 @@ public class DebuggerSink implements Sink { public DebuggerSink(Config config) { this( config, - new BatchUploader(config), + new BatchUploader(config, config.getFinalDebuggerSnapshotUrl()), DebuggerMetrics.getInstance(config), new ProbeStatusSink(config), - new SnapshotSink(config)); + new SnapshotSink(config), + new SymbolSink(config)); } DebuggerSink(Config config, BatchUploader batchUploader) { @@ -54,16 +56,18 @@ public DebuggerSink(Config config) { batchUploader, DebuggerMetrics.getInstance(config), new ProbeStatusSink(config), - new SnapshotSink(config)); + new SnapshotSink(config), + new SymbolSink(config)); } public DebuggerSink(Config config, ProbeStatusSink probeStatusSink) { this( config, - new BatchUploader(config), + new BatchUploader(config, config.getFinalDebuggerSnapshotUrl()), DebuggerMetrics.getInstance(config), probeStatusSink, - new SnapshotSink(config)); + new SnapshotSink(config), + new SymbolSink(config)); } DebuggerSink(Config config, BatchUploader batchUploader, DebuggerMetrics debuggerMetrics) { @@ -72,7 +76,8 @@ public DebuggerSink(Config config, ProbeStatusSink probeStatusSink) { batchUploader, debuggerMetrics, new ProbeStatusSink(config), - new SnapshotSink(config)); + new SnapshotSink(config), + new SymbolSink(config)); } public DebuggerSink( @@ -80,12 +85,14 @@ public DebuggerSink( BatchUploader batchUploader, DebuggerMetrics debuggerMetrics, ProbeStatusSink probeStatusSink, - SnapshotSink snapshotSink) { + SnapshotSink snapshotSink, + SymbolSink symbolSink) { this.batchUploader = batchUploader; tags = getDefaultTagsMergedWithGlobalTags(config); this.debuggerMetrics = debuggerMetrics; this.probeStatusSink = probeStatusSink; this.snapshotSink = snapshotSink; + this.symbolSink = symbolSink; this.uploadFlushInterval = 
config.getDebuggerUploadFlushInterval(); } @@ -136,6 +143,10 @@ public BatchUploader getSnapshotUploader() { return batchUploader; } + public SymbolSink getSymbolSink() { + return symbolSink; + } + @Override public void addSnapshot(Snapshot snapshot) { boolean added = snapshotSink.offer(snapshot); @@ -160,15 +171,16 @@ private void reschedule() { // visible for testing void flush(DebuggerSink ignored) { + symbolSink.flush(); List diagnostics = probeStatusSink.getSerializedDiagnostics(); List snapshots = snapshotSink.getSerializedSnapshots(); if (snapshots.size() + diagnostics.size() == 0) { return; } - if (snapshots.size() >= 1) { + if (snapshots.size() > 0) { uploadPayloads(snapshots); } - if (diagnostics.size() >= 1) { + if (diagnostics.size() > 0) { uploadPayloads(diagnostics); } } @@ -221,7 +233,6 @@ public void removeDiagnostics(ProbeId probeId) { probeStatusSink.removeDiagnostics(probeId); } - @Override public void addDiagnostics(ProbeId probeId, List messages) { for (DiagnosticMessage msg : messages) { switch (msg.getKind()) { diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/ProbeStatusSink.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/ProbeStatusSink.java index c4be25c2b09..017bf5e544a 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/ProbeStatusSink.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/ProbeStatusSink.java @@ -66,6 +66,10 @@ public List getSerializedDiagnostics() { List serializedDiagnostics = new ArrayList<>(); for (ProbeStatus message : diagnostics) { try { + LOGGER.debug( + "Sending probe status[{}] for probe id: {}", + message.getDiagnostics().getStatus(), + message.getDiagnostics().getProbeId().getId()); serializedDiagnostics.add(PROBE_STATUS_ADAPTER.toJson(message)); } catch (Exception e) { ExceptionHelper.logException(LOGGER, e, "Error during probe status serialization:"); diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Sink.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Sink.java index fbc7c44989b..687c83ef747 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Sink.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Sink.java @@ -1,14 +1,9 @@ package com.datadog.debugger.sink; -import com.datadog.debugger.instrumentation.DiagnosticMessage; import datadog.trace.bootstrap.debugger.DebuggerContext; -import datadog.trace.bootstrap.debugger.ProbeId; -import java.util.List; public interface Sink { void addSnapshot(Snapshot snapshot); - void addDiagnostics(ProbeId probeId, List messages); - void skipSnapshot(String probeId, DebuggerContext.SkipCause cause); } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Snapshot.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Snapshot.java index 5a1bec8ead8..7c59823006e 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Snapshot.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/Snapshot.java @@ -26,8 +26,8 @@ public class Snapshot { private final ProbeImplementation probe; private final String language; private final transient CapturedThread thread; - private String traceId; // trace_id - private String spanId; // span_id + private transient String traceId; + private transient String spanId; private List evaluationErrors; private transient String message; private 
final transient int maxDepth; @@ -176,6 +176,14 @@ public void recordStackTrace(int offset) { } } + public void setTraceId(String traceId) { + this.traceId = traceId; + } + + public void setSpanId(String spanId) { + this.spanId = spanId; + } + public enum Kind { ENTER, RETURN, diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/SymbolSink.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/SymbolSink.java new file mode 100644 index 00000000000..1ff688a35ad --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/SymbolSink.java @@ -0,0 +1,85 @@ +package com.datadog.debugger.sink; + +import com.datadog.debugger.symbol.Scope; +import com.datadog.debugger.symbol.ServiceVersion; +import com.datadog.debugger.uploader.BatchUploader; +import com.datadog.debugger.util.ExceptionHelper; +import com.datadog.debugger.util.MoshiHelper; +import com.squareup.moshi.JsonAdapter; +import datadog.trace.api.Config; +import datadog.trace.util.TagsHelper; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SymbolSink { + + private static final Logger LOGGER = LoggerFactory.getLogger(SymbolSink.class); + private static final int CAPACITY = 1024; + private static final JsonAdapter SERVICE_VERSION_ADAPTER = + MoshiHelper.createMoshiSymbol().adapter(ServiceVersion.class); + private static final String EVENT_FORMAT = + "{%n" + + "\"ddsource\": \"dd_debugger\",%n" + + "\"service\": \"%s\",%n" + + "\"runtimeId\": \"%s\"%n" + + "}"; + + private final String serviceName; + private final String env; + private final String version; + private final BatchUploader symbolUploader; + private final BlockingQueue scopes = new ArrayBlockingQueue<>(CAPACITY); + private final BatchUploader.MultiPartContent event; + + public SymbolSink(Config config) { + this(config, new BatchUploader(config, config.getFinalDebuggerSymDBUrl())); + } + + SymbolSink(Config config, BatchUploader symbolUploader) { + this.serviceName = TagsHelper.sanitize(config.getServiceName()); + this.env = config.getEnv(); + this.version = config.getVersion(); + this.symbolUploader = symbolUploader; + byte[] eventContent = + String.format(EVENT_FORMAT, serviceName, config.getRuntimeId()) + .getBytes(StandardCharsets.UTF_8); + this.event = new BatchUploader.MultiPartContent(eventContent, "event", "event.json"); + } + + public boolean addScope(Scope jarScope) { + ServiceVersion serviceVersion = + new ServiceVersion(serviceName, env, version, "JAVA", Collections.singletonList(jarScope)); + return scopes.offer(serviceVersion); + } + + public void flush() { + if (scopes.isEmpty()) { + return; + } + List scopesToSerialize = new ArrayList<>(); + scopes.drainTo(scopesToSerialize); + LOGGER.debug("Sending {} scopes", scopesToSerialize.size()); + for (ServiceVersion serviceVersion : scopesToSerialize) { + try { + String json = SERVICE_VERSION_ADAPTER.toJson(serviceVersion); + LOGGER.debug( + "Sending scope: {}, size={}", + serviceVersion.getScopes().get(0).getName(), + json.length()); + symbolUploader.uploadAsMultipart( + "", + event, + new BatchUploader.MultiPartContent( + json.getBytes(StandardCharsets.UTF_8), "file", "file.json")); + } catch (Exception e) { + ExceptionHelper.logException(LOGGER, e, "Error during scope serialization:"); + } + } + } +} 
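The new SymbolSink above follows a bounded-queue batching pattern: addScope offers into an ArrayBlockingQueue (dropping the scope when the queue is full) and flush drains whatever is queued and uploads each entry as a multipart request. A simplified, self-contained sketch of that pattern; BoundedBatchingSink and its upload method are hypothetical stand-ins, not the real BatchUploader API.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

// Producers offer items into a bounded queue; flush() drains and uploads whatever is queued.
public class BoundedBatchingSink<T> {
  private final BlockingQueue<T> queue = new ArrayBlockingQueue<>(1024);

  public boolean add(T item) {
    return queue.offer(item); // non-blocking: returns false instead of blocking when full
  }

  public void flush() {
    if (queue.isEmpty()) {
      return;
    }
    List<T> batch = new ArrayList<>();
    queue.drainTo(batch);
    for (T item : batch) {
      upload(item);
    }
  }

  private void upload(T item) {
    System.out.println("uploading " + item); // stand-in for the multipart upload
  }

  public static void main(String[] args) {
    BoundedBatchingSink<String> sink = new BoundedBatchingSink<>();
    sink.add("com.example.Foo scope");
    sink.flush(); // prints "uploading com.example.Foo scope"
  }
}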
diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/Scope.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/Scope.java new file mode 100644 index 00000000000..4963e687959 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/Scope.java @@ -0,0 +1,149 @@ +package com.datadog.debugger.symbol; + +import com.squareup.moshi.Json; +import java.util.List; + +public class Scope { + @Json(name = "scope_type") + private final ScopeType scopeType; + + @Json(name = "source_file") + private final String sourceFile; + + @Json(name = "start_line") + private final int startLine; + + @Json(name = "end_line") + private final int endLine; + + private final String name; + + @Json(name = "language_specifics") + private final List languageSpecifics; + + private final List symbols; + private final List scopes; + + public Scope( + ScopeType scopeType, + String sourceFile, + int startLine, + int endLine, + String name, + List languageSpecifics, + List symbols, + List scopes) { + this.scopeType = scopeType; + this.sourceFile = sourceFile; + this.startLine = startLine; + this.endLine = endLine; + this.name = name; + this.languageSpecifics = languageSpecifics; + this.symbols = symbols; + this.scopes = scopes; + } + + public ScopeType getScopeType() { + return scopeType; + } + + public String getSourceFile() { + return sourceFile; + } + + public int getStartLine() { + return startLine; + } + + public int getEndLine() { + return endLine; + } + + public String getName() { + return name; + } + + public List getLanguageSpecifics() { + return languageSpecifics; + } + + public List getSymbols() { + return symbols; + } + + public List getScopes() { + return scopes; + } + + @Override + public String toString() { + return "Scope{" + + "scopeType=" + + scopeType + + ", sourceFile='" + + sourceFile + + '\'' + + ", startLine=" + + startLine + + ", endLine=" + + endLine + + ", name='" + + name + + '\'' + + ", languageSpecifics=" + + languageSpecifics + + ", symbols=" + + symbols + + ", scopes=" + + scopes + + '}'; + } + + public static Builder builder( + ScopeType scopeType, String sourceFile, int startLine, int endLine) { + return new Builder(scopeType, sourceFile, startLine, endLine); + } + + public static class Builder { + private final ScopeType scopeType; + private final String sourceFile; + private final int startLine; + private final int endLine; + private String name; + private List languageSpecifics; + private List symbols; + private List scopes; + + public Builder(ScopeType scopeType, String sourceFile, int startLine, int endLine) { + this.scopeType = scopeType; + this.sourceFile = sourceFile; + this.startLine = startLine; + this.endLine = endLine; + } + + public Builder name(String name) { + this.name = name; + return this; + } + + public Builder languageSpecifics(List languageSpecifics) { + this.languageSpecifics = languageSpecifics; + return this; + } + + public Builder symbols(List symbols) { + this.symbols = symbols; + return this; + } + + public Builder scopes(List scopes) { + this.scopes = scopes; + return this; + } + + public Scope build() { + return new Scope( + scopeType, sourceFile, startLine, endLine, name, languageSpecifics, symbols, scopes); + } + } +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/ScopeType.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/ScopeType.java new file mode 100644 index 00000000000..99d8c004a55 --- /dev/null +++ 
b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/ScopeType.java @@ -0,0 +1,9 @@ +package com.datadog.debugger.symbol; + +public enum ScopeType { + JAR, + CLASS, + METHOD, + LOCAL, + CLOSURE +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/ServiceVersion.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/ServiceVersion.java new file mode 100644 index 00000000000..a88fc25da63 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/ServiceVersion.java @@ -0,0 +1,25 @@ +package com.datadog.debugger.symbol; + +import java.util.List; + +public class ServiceVersion { + private final String service; + + private final String env; + private final String version; + private final String language; + private final List scopes; + + public ServiceVersion( + String service, String env, String version, String language, List scopes) { + this.service = service; + this.env = env; + this.version = version; + this.language = language; + this.scopes = scopes; + } + + public List getScopes() { + return scopes; + } +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/Symbol.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/Symbol.java new file mode 100644 index 00000000000..55380f6c916 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/Symbol.java @@ -0,0 +1,51 @@ +package com.datadog.debugger.symbol; + +import com.squareup.moshi.Json; + +public class Symbol { + @Json(name = "symbol_type") + private final SymbolType symbolType; + + private final String name; + private final int line; + private final String type; + + public Symbol(SymbolType symbolType, String name, int line, String type) { + this.symbolType = symbolType; + this.name = name; + this.line = line; + this.type = type; + } + + public SymbolType getSymbolType() { + return symbolType; + } + + public String getName() { + return name; + } + + public int getLine() { + return line; + } + + public String getType() { + return type; + } + + @Override + public String toString() { + return "Symbol{" + + "symbolType=" + + symbolType + + ", name='" + + name + + '\'' + + ", line=" + + line + + ", type='" + + type + + '\'' + + '}'; + } +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolExtractionTransformer.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolExtractionTransformer.java new file mode 100644 index 00000000000..203a306a1c0 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolExtractionTransformer.java @@ -0,0 +1,140 @@ +package com.datadog.debugger.symbol; + +import com.datadog.debugger.agent.AllowListHelper; +import com.datadog.debugger.agent.Configuration; +import com.datadog.debugger.sink.SymbolSink; +import datadog.trace.api.Config; +import datadog.trace.util.AgentTaskScheduler; +import datadog.trace.util.Strings; +import java.lang.instrument.ClassFileTransformer; +import java.net.URL; +import java.security.CodeSource; +import java.security.ProtectionDomain; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.regex.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SymbolExtractionTransformer implements 
ClassFileTransformer { + + private static final Logger LOGGER = LoggerFactory.getLogger(SymbolExtractionTransformer.class); + private static final Pattern COMMA_PATTERN = Pattern.compile(","); + + private final SymbolSink sink; + private final Map jarScopesByName = new HashMap<>(); + private final AgentTaskScheduler.Scheduled scheduled; + private final Object jarScopeLock = new Object(); + private final AllowListHelper allowListHelper; + private int totalClasses; + private final int symbolFlushThreshold; + + public SymbolExtractionTransformer() { + this(new SymbolSink(Config.get()), Config.get()); + } + + public SymbolExtractionTransformer(SymbolSink sink, Config config) { + this.sink = sink; + this.scheduled = + AgentTaskScheduler.INSTANCE.scheduleAtFixedRate( + this::flushRemainingScopes, this, 30, 30, TimeUnit.SECONDS); + String includes = config.getDebuggerSymbolIncludes(); + if (includes != null) { + this.allowListHelper = new AllowListHelper(buildFilterList(includes)); + } else { + this.allowListHelper = null; + } + this.symbolFlushThreshold = config.getDebuggerSymbolFlushThreshold(); + } + + private void flushRemainingScopes(SymbolExtractionTransformer symbolExtractionTransformer) { + synchronized (jarScopeLock) { + if (jarScopesByName.isEmpty()) { + return; + } + LOGGER.debug("Flush remaining scopes"); + addJarScope(null, true); // force flush remaining scopes + } + } + + private Configuration.FilterList buildFilterList(String includes) { + String[] includeParts = COMMA_PATTERN.split(includes); + return new Configuration.FilterList(Arrays.asList(includeParts), Collections.emptyList()); + } + + @Override + public byte[] transform( + ClassLoader loader, + String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, + byte[] classfileBuffer) { + if (className == null) { + return null; + } + if (allowListHelper == null) { + if (className.startsWith("java/") + || className.startsWith("javax/") + || className.startsWith("jdk/") + || className.startsWith("sun/") + || className.startsWith("com/sun/") + || className.startsWith("datadog/") + || className.startsWith("com/datadog/")) { + return null; + } + } else { + if (!allowListHelper.isAllowed(Strings.getClassName(className))) { + return null; + } + } + String jarName = "DEFAULT"; + if (protectionDomain != null) { + CodeSource codeSource = protectionDomain.getCodeSource(); + if (codeSource != null) { + URL location = codeSource.getLocation(); + if (location != null) { + jarName = location.getFile(); + } + } + } + LOGGER.debug("Extracting Symbols from: {}, located in: {}", className, jarName); + Scope jarScope = SymbolExtractor.extract(classfileBuffer, jarName); + addJarScope(jarScope, false); + return null; + } + + private void addJarScope(Scope jarScope, boolean forceFlush) { + List scopes = Collections.emptyList(); + synchronized (jarScopeLock) { + if (jarScope != null) { + Scope scope = jarScopesByName.get(jarScope.getName()); + if (scope != null) { + scope.getScopes().addAll(jarScope.getScopes()); + } else { + jarScopesByName.put(jarScope.getName(), jarScope); + } + totalClasses++; + } + if (totalClasses >= symbolFlushThreshold || forceFlush) { + scopes = new ArrayList<>(jarScopesByName.values()); + jarScopesByName.clear(); + totalClasses = 0; + } + } + if (!scopes.isEmpty()) { + LOGGER.debug("dumping {} jar scopes to sink", scopes.size()); + for (Scope scope : scopes) { + LOGGER.debug( + "dumping {} class scopes to sink from scope: {}", + scope.getScopes().size(), + scope.getName()); + sink.addScope(scope); + } 
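The SymbolExtractionTransformer above aggregates per-class scopes under their jar name and only hands them to the sink once a class-count threshold is reached or a periodic task forces a flush. A minimal sketch of that aggregate-then-flush logic, using plain strings as stand-ins for the real Scope objects:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Merges class scopes by jar name and returns the drained batch once the threshold is hit.
public class ScopeAggregator {
  private final Map<String, List<String>> classScopesByJar = new HashMap<>();
  private final int flushThreshold;
  private int totalClasses;

  public ScopeAggregator(int flushThreshold) {
    this.flushThreshold = flushThreshold;
  }

  public synchronized List<List<String>> addClassScope(String jarName, String classScope, boolean forceFlush) {
    classScopesByJar.computeIfAbsent(jarName, k -> new ArrayList<>()).add(classScope);
    totalClasses++;
    if (totalClasses < flushThreshold && !forceFlush) {
      return new ArrayList<>(); // nothing to flush yet
    }
    List<List<String>> toFlush = new ArrayList<>(classScopesByJar.values());
    classScopesByJar.clear();
    totalClasses = 0;
    return toFlush;
  }

  public static void main(String[] args) {
    ScopeAggregator agg = new ScopeAggregator(2);
    System.out.println(agg.addClassScope("app.jar", "com.example.A", false).size()); // 0
    System.out.println(agg.addClassScope("app.jar", "com.example.B", false).size()); // 1 jar scope flushed
  }
}

Returning the drained batch instead of uploading inside the lock mirrors the transformer's approach of calling sink.addScope only after leaving the synchronized block.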
+ } + } +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolExtractor.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolExtractor.java new file mode 100644 index 00000000000..3498234820e --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolExtractor.java @@ -0,0 +1,264 @@ +package com.datadog.debugger.symbol; + +import static com.datadog.debugger.instrumentation.ASMHelper.adjustLocalVarsBasedOnArgs; +import static com.datadog.debugger.instrumentation.ASMHelper.createLocalVarNodes; +import static com.datadog.debugger.instrumentation.ASMHelper.sortLocalVariables; + +import com.datadog.debugger.instrumentation.ASMHelper; +import datadog.trace.util.Strings; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; +import org.objectweb.asm.tree.AbstractInsnNode; +import org.objectweb.asm.tree.ClassNode; +import org.objectweb.asm.tree.FieldNode; +import org.objectweb.asm.tree.LabelNode; +import org.objectweb.asm.tree.LineNumberNode; +import org.objectweb.asm.tree.LocalVariableNode; +import org.objectweb.asm.tree.MethodNode; +import org.slf4j.LoggerFactory; + +public class SymbolExtractor { + + public static Scope extract(byte[] classFileBuffer, String jarName) { + ClassNode classNode = parseClassFile(classFileBuffer); + return extractScopes(classNode, jarName); + } + + private static Scope extractScopes(ClassNode classNode, String jarName) { + try { + String sourceFile = extractSourceFile(classNode); + List methodScopes = new ArrayList<>(); + for (MethodNode method : classNode.methods) { + MethodLineInfo methodLineInfo = extractMethodLineInfo(method); + List varScopes = new ArrayList<>(); + List methodSymbols = new ArrayList<>(); + int localVarBaseSlot = extractArgs(method, methodSymbols, methodLineInfo.start); + extractScopesFromVariables( + sourceFile, method, methodLineInfo.lineMap, varScopes, localVarBaseSlot); + ScopeType methodScopeType = ScopeType.METHOD; + if (method.name.startsWith("lambda$")) { + methodScopeType = ScopeType.CLOSURE; + } + Scope methodScope = + Scope.builder(methodScopeType, sourceFile, methodLineInfo.start, methodLineInfo.end) + .name(method.name) + .scopes(varScopes) + .symbols(methodSymbols) + .build(); + methodScopes.add(methodScope); + } + int classStartLine = Integer.MAX_VALUE; + int classEndLine = 0; + for (Scope scope : methodScopes) { + classStartLine = Math.min(classStartLine, scope.getStartLine()); + classEndLine = Math.max(classEndLine, scope.getEndLine()); + } + List fields = new ArrayList<>(); + for (FieldNode fieldNode : classNode.fields) { + SymbolType symbolType = + ASMHelper.isStaticField(fieldNode) ? 
SymbolType.STATIC_FIELD : SymbolType.FIELD; + fields.add( + new Symbol(symbolType, fieldNode.name, 0, Type.getType(fieldNode.desc).getClassName())); + } + Scope classScope = + Scope.builder(ScopeType.CLASS, sourceFile, classStartLine, classEndLine) + .name(Strings.getClassName(classNode.name)) + .scopes(methodScopes) + .symbols(fields) + .build(); + return Scope.builder(ScopeType.JAR, jarName, 0, 0) + .name(jarName) + .scopes(new ArrayList<>(Collections.singletonList(classScope))) + .build(); + } catch (Exception ex) { + LoggerFactory.getLogger(SymbolExtractor.class).info("", ex); + return null; + } + } + + private static String extractSourceFile(ClassNode classNode) { + String packageName = classNode.name; + int idx = packageName.lastIndexOf('/'); + packageName = idx >= 0 ? packageName.substring(0, idx + 1) : ""; + return packageName + classNode.sourceFile; + } + + private static int extractArgs( + MethodNode method, List methodSymbols, int methodStartLine) { + boolean isStatic = (method.access & Opcodes.ACC_STATIC) != 0; + int slot = isStatic ? 0 : 1; + if (method.localVariables == null || method.localVariables.size() == 0) { + return slot; + } + Type[] argTypes = Type.getArgumentTypes(method.desc); + if (argTypes.length == 0) { + return slot; + } + List sortedLocalVars = sortLocalVariables(method.localVariables); + LocalVariableNode[] localVarsBySlot = createLocalVarNodes(sortedLocalVars); + adjustLocalVarsBasedOnArgs(isStatic, localVarsBySlot, argTypes, sortedLocalVars); + for (Type argType : argTypes) { + if (slot >= localVarsBySlot.length) { + break; + } + String argName = localVarsBySlot[slot] != null ? localVarsBySlot[slot].name : "p" + slot; + methodSymbols.add( + new Symbol(SymbolType.ARG, argName, methodStartLine, argType.getClassName())); + slot += argType.getSize(); + } + return slot; + } + + private static void extractScopesFromVariables( + String sourceFile, + MethodNode methodNode, + Map monotonicLineMap, + List varScopes, + int localVarBaseSlot) { + if (methodNode.localVariables == null) { + return; + } + // using a LinkedHashMap only for having a stable order of local scopes (tests) + Map> varsByEndLabel = new LinkedHashMap<>(); + for (int i = 0; i < methodNode.localVariables.size(); i++) { + LocalVariableNode localVariable = methodNode.localVariables.get(i); + if (localVariable.index < localVarBaseSlot) { + continue; + } + varsByEndLabel.merge( + localVariable.end, + new ArrayList<>(Collections.singletonList(localVariable)), + (curr, next) -> { + curr.addAll(next); + return curr; + }); + } + List tmpScopes = new ArrayList<>(); + for (Map.Entry> entry : varsByEndLabel.entrySet()) { + List varSymbols = new ArrayList<>(); + int minLine = Integer.MAX_VALUE; + for (LocalVariableNode var : entry.getValue()) { + int line = monotonicLineMap.get(var.start.getLabel()); + minLine = Math.min(line, minLine); + varSymbols.add( + new Symbol(SymbolType.LOCAL, var.name, line, Type.getType(var.desc).getClassName())); + } + int endLine = monotonicLineMap.get(entry.getKey().getLabel()); + Scope varScope = + Scope.builder(ScopeType.LOCAL, sourceFile, minLine, endLine) + .symbols(varSymbols) + .scopes(new ArrayList<>()) + .build(); + tmpScopes.add(varScope); + } + nestScopes(varScopes, tmpScopes); + } + + private static Scope removeWidestScope(List scopes) { + Scope widestScope = null; + for (Scope scope : scopes) { + widestScope = widestScope != null ? 
maxScope(widestScope, scope) : scope; + } + // Remove the actual widest instance from the list, based on reference equality + scopes.remove(widestScope); + return widestScope; + } + + private static void nestScopes(List outerScopes, List scopes) { + Scope widestScope = removeWidestScope(scopes); + if (widestScope == null) { + return; + } + outerScopes.add(widestScope); + for (Scope scope : scopes) { + boolean added = false; + for (Scope outerScope : outerScopes) { + if (isInnerScope(outerScope, scope)) { + outerScope.getScopes().add(scope); + added = true; + break; + } + } + if (!added) { + outerScopes.add(scope); + } + } + for (Scope outerScope : outerScopes) { + List tmpScopes = new ArrayList<>(outerScope.getScopes()); + outerScope.getScopes().clear(); + nestScopes(outerScope.getScopes(), tmpScopes); + } + } + + private static boolean isInnerScope(Scope enclosingScope, Scope scope) { + return scope.getStartLine() >= enclosingScope.getStartLine() + && scope.getEndLine() <= enclosingScope.getEndLine(); + } + + private static Scope maxScope(Scope scope1, Scope scope2) { + return scope1.getStartLine() > scope2.getStartLine() + || scope1.getEndLine() < scope2.getEndLine() + ? scope2 + : scope1; + } + + private static int getFirstLine(MethodNode methodNode) { + AbstractInsnNode node = methodNode.instructions.getFirst(); + while (node != null) { + if (node.getType() == AbstractInsnNode.LINE) { + LineNumberNode lineNumberNode = (LineNumberNode) node; + return lineNumberNode.line; + } + node = node.getNext(); + } + return 0; + } + + private static MethodLineInfo extractMethodLineInfo(MethodNode methodNode) { + Map map = new HashMap<>(); + int startLine = getFirstLine(methodNode); + int maxLine = startLine; + AbstractInsnNode node = methodNode.instructions.getFirst(); + while (node != null) { + if (node.getType() == AbstractInsnNode.LINE) { + LineNumberNode lineNumberNode = (LineNumberNode) node; + maxLine = Math.max(lineNumberNode.line, maxLine); + } + if (node.getType() == AbstractInsnNode.LABEL) { + if (node instanceof LabelNode) { + LabelNode labelNode = (LabelNode) node; + map.put(labelNode.getLabel(), maxLine); + } + } + node = node.getNext(); + } + return new MethodLineInfo(startLine, maxLine, map); + } + + private static ClassNode parseClassFile(byte[] classfileBuffer) { + ClassReader reader = new ClassReader(classfileBuffer); + ClassNode classNode = new ClassNode(); + reader.accept(classNode, ClassReader.SKIP_FRAMES); + return classNode; + } + + public static class MethodLineInfo { + final int start; + final int end; + final Map lineMap; + + public MethodLineInfo(int start, int end, Map lineMap) { + this.start = start; + this.end = end; + this.lineMap = lineMap; + } + } +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolType.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolType.java new file mode 100644 index 00000000000..94b5b889c74 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/symbol/SymbolType.java @@ -0,0 +1,8 @@ +package com.datadog.debugger.symbol; + +public enum SymbolType { + FIELD, + STATIC_FIELD, + ARG, + LOCAL +} diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/uploader/BatchUploader.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/uploader/BatchUploader.java index 422835ab659..15dbaaf604d 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/uploader/BatchUploader.java +++ 
b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/uploader/BatchUploader.java @@ -18,10 +18,10 @@ import java.util.concurrent.TimeoutException; import okhttp3.Call; import okhttp3.Callback; -import okhttp3.ConnectionPool; import okhttp3.Dispatcher; import okhttp3.HttpUrl; import okhttp3.MediaType; +import okhttp3.MultipartBody; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; @@ -32,13 +32,37 @@ /** Handles batching logic of upload requests sent to the intake */ public class BatchUploader { + public static class MultiPartContent { + private final byte[] content; + private final String partName; + private final String fileName; + + public MultiPartContent(byte[] content, String partName, String fileName) { + this.content = content; + this.partName = partName; + this.fileName = fileName; + } + + public byte[] getContent() { + return content; + } + + public String getPartName() { + return partName; + } + + public String getFileName() { + return fileName; + } + } + private static final Logger log = LoggerFactory.getLogger(BatchUploader.class); private static final int MINUTES_BETWEEN_ERROR_LOG = 5; private static final MediaType APPLICATION_JSON = MediaType.parse("application/json"); - private static final String HEADER_DD_API_KEY = "DD-API-KEY"; private static final String HEADER_DD_CONTAINER_ID = "Datadog-Container-ID"; private final String containerId; + static final String HEADER_DD_API_KEY = "DD-API-KEY"; static final int MAX_RUNNING_REQUESTS = 10; static final int MAX_ENQUEUED_REQUESTS = 20; static final int TERMINATION_TIMEOUT = 5; @@ -54,23 +78,23 @@ public class BatchUploader { private final Phaser inflightRequests = new Phaser(1); - public BatchUploader(Config config) { - this(config, new RatelimitedLogger(log, MINUTES_BETWEEN_ERROR_LOG, TimeUnit.MINUTES)); + public BatchUploader(Config config, String endpoint) { + this(config, endpoint, new RatelimitedLogger(log, MINUTES_BETWEEN_ERROR_LOG, TimeUnit.MINUTES)); } - BatchUploader(Config config, RatelimitedLogger ratelimitedLogger) { - this(config, ratelimitedLogger, ContainerInfo.get().containerId); + BatchUploader(Config config, String endpoint, RatelimitedLogger ratelimitedLogger) { + this(config, endpoint, ratelimitedLogger, ContainerInfo.get().containerId); } // Visible for testing - BatchUploader(Config config, RatelimitedLogger ratelimitedLogger, String containerId) { + BatchUploader( + Config config, String endpoint, RatelimitedLogger ratelimitedLogger, String containerId) { instrumentTheWorld = config.isDebuggerInstrumentTheWorld(); - String url = config.getFinalDebuggerSnapshotUrl(); - if (url == null || url.length() == 0) { - throw new IllegalArgumentException("Snapshot url is empty"); + if (endpoint == null || endpoint.length() == 0) { + throw new IllegalArgumentException("Endpoint url is empty"); } - urlBase = HttpUrl.get(url); - log.debug("Started SnapshotUploader with target url {}", urlBase); + urlBase = HttpUrl.get(endpoint); + log.debug("Started BatchUploader with target url {}", urlBase); apiKey = config.getApiKey(); this.ratelimitedLogger = ratelimitedLogger; responseCallback = new ResponseCallback(ratelimitedLogger, inflightRequests); @@ -84,10 +108,6 @@ public BatchUploader(Config config) { new SynchronousQueue<>(), new AgentThreadFactory(DEBUGGER_HTTP_DISPATCHER)); this.containerId = containerId; - // Reusing connections causes non daemon threads to be created which causes agent to prevent app - // from exiting. 
See https://github.com/square/okhttp/issues/4029 for some details. - ConnectionPool connectionPool = new ConnectionPool(MAX_RUNNING_REQUESTS, 1, TimeUnit.SECONDS); - Duration requestTimeout = Duration.ofSeconds(config.getDebuggerUploadTimeout()); client = OkHttpUtils.buildHttpClient( @@ -110,21 +130,41 @@ public void upload(byte[] batch) { } public void upload(byte[] batch, String tags) { + doUpload(() -> makeUploadRequest(batch, tags)); + } + + public void uploadAsMultipart(String tags, MultiPartContent... parts) { + doUpload(() -> makeMultipartUploadRequest(tags, parts)); + } + + private void makeMultipartUploadRequest(String tags, MultiPartContent[] parts) { + int contentLength = 0; + MultipartBody.Builder builder = new MultipartBody.Builder().setType(MultipartBody.FORM); + for (MultiPartContent part : parts) { + RequestBody fileBody = RequestBody.create(APPLICATION_JSON, part.content); + contentLength += part.content.length; + builder.addFormDataPart(part.partName, part.fileName, fileBody); + } + MultipartBody body = builder.build(); + buildAndSendRequest(body, contentLength, tags); + } + + private void doUpload(Runnable makeRequest) { if (instrumentTheWorld) { // no upload in Instrument-The-World mode return; } try { if (canEnqueueMoreRequests()) { - makeUploadRequest(batch, tags); + makeRequest.run(); debuggerMetrics.count("batch.uploaded", 1); } else { debuggerMetrics.count("request.queue.full", 1); ratelimitedLogger.warn("Cannot upload batch data: too many enqueued requests!"); } - } catch (final IllegalStateException | IOException e) { + } catch (Exception ex) { debuggerMetrics.count("batch.upload.error", 1); - ratelimitedLogger.warn("Problem uploading batch!", e); + ratelimitedLogger.warn("Problem uploading batch!", ex); } } @@ -136,11 +176,15 @@ OkHttpClient getClient() { return client; } - private void makeUploadRequest(byte[] json, String tags) throws IOException { + private void makeUploadRequest(byte[] json, String tags) { + int contentLength = json.length; // use RequestBody.create(MediaType, byte[]) to avoid changing Content-Type to // "Content-Type: application/json; charset=UTF-8" which is not recognized - int contentLength = json.length; RequestBody body = RequestBody.create(APPLICATION_JSON, json); + buildAndSendRequest(body, contentLength, tags); + } + + private void buildAndSendRequest(RequestBody body, int contentLength, String tags) { debuggerMetrics.histogram("batch.uploader.request.size", contentLength); if (log.isDebugEnabled()) { log.debug("Uploading batch data size={} bytes", contentLength); diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiHelper.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiHelper.java index 47f1828e773..31e132639a6 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiHelper.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiHelper.java @@ -46,4 +46,8 @@ public static JsonAdapter> createGenericAdapter() { ParameterizedType type = Types.newParameterizedType(Map.class, String.class, Object.class); return new Moshi.Builder().build().adapter(type); } + + public static Moshi createMoshiSymbol() { + return new Moshi.Builder().build(); + } } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiSnapshotHelper.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiSnapshotHelper.java index 8708534ae9c..5f5a9e63ee4 100644 --- 
a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiSnapshotHelper.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/MoshiSnapshotHelper.java @@ -11,6 +11,7 @@ import datadog.trace.bootstrap.debugger.ProbeImplementation; import datadog.trace.bootstrap.debugger.ProbeLocation; import datadog.trace.bootstrap.debugger.util.TimeoutChecker; +import datadog.trace.bootstrap.debugger.util.WellKnownClasses; import java.io.ByteArrayInputStream; import java.io.IOException; import java.lang.annotation.Annotation; @@ -43,6 +44,8 @@ public class MoshiSnapshotHelper { public static final String COLLECTION_SIZE_REASON = "collectionSize"; public static final String TIMEOUT_REASON = "timeout"; public static final String DEPTH_REASON = "depth"; + public static final String REDACTED_IDENT_REASON = "redactedIdent"; + public static final String REDACTED_TYPE_REASON = "redactedType"; public static final String TYPE = "type"; public static final String VALUE = "value"; public static final String FIELDS = "fields"; @@ -460,6 +463,18 @@ public void notCaptured(SerializerWithLimits.NotCapturedReason reason) throws Ex jsonWriter.value(TIMEOUT_REASON); break; } + case REDACTED_IDENT: + { + jsonWriter.name(NOT_CAPTURED_REASON); + jsonWriter.value(REDACTED_IDENT_REASON); + break; + } + case REDACTED_TYPE: + { + jsonWriter.name(NOT_CAPTURED_REASON); + jsonWriter.value(REDACTED_TYPE_REASON); + break; + } default: throw new RuntimeException("Unsupported NotCapturedReason: " + reason); } diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/SerializerWithLimits.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/SerializerWithLimits.java index 588d7c6acf1..5d2da23f805 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/SerializerWithLimits.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/SerializerWithLimits.java @@ -1,8 +1,12 @@ package com.datadog.debugger.util; +import static datadog.trace.bootstrap.debugger.util.Redaction.REDACTED_VALUE; + import datadog.trace.bootstrap.debugger.CapturedContext; import datadog.trace.bootstrap.debugger.Limits; +import datadog.trace.bootstrap.debugger.util.Redaction; import datadog.trace.bootstrap.debugger.util.TimeoutChecker; +import datadog.trace.bootstrap.debugger.util.WellKnownClasses; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.Modifier; @@ -45,7 +49,9 @@ public static boolean isPrimitive(String type) { enum NotCapturedReason { MAX_DEPTH, FIELD_COUNT, - TIMEOUT + TIMEOUT, + REDACTED_IDENT, + REDACTED_TYPE } public interface TokenWriter { @@ -115,6 +121,17 @@ public void serialize(Object value, String type, Limits limits) throws Exception throw new IllegalArgumentException("Type is required for serialization"); } tokenWriter.prologue(value, type); + NotCapturedReason reason = null; + if (value == REDACTED_VALUE) { + reason = NotCapturedReason.REDACTED_IDENT; + } else if (Redaction.isRedactedType(type)) { + reason = NotCapturedReason.REDACTED_TYPE; + } + if (reason != null) { + tokenWriter.notCaptured(reason); + tokenWriter.epilogue(value); + return; + } if (timeoutChecker.isTimedOut(System.currentTimeMillis())) { tokenWriter.notCaptured(NotCapturedReason.TIMEOUT); tokenWriter.epilogue(value); @@ -152,9 +169,13 @@ private void serializeMap(Object value, Limits limits) throws Exception { int size = 0; try { map = (Map) value; - size = map.size(); // /!\ alien call /!\ - Set> 
entries = map.entrySet(); // /!\ alien call /!\ - isComplete = serializeMapEntries(entries, limits); // /!\ contains alien calls /!\ + if (WellKnownClasses.isSizeSafe(map)) { + size = map.size(); // /!\ alien call /!\ + Set> entries = map.entrySet(); // /!\ alien call /!\ + isComplete = serializeMapEntries(entries, limits); // /!\ contains alien calls /!\ + } else { + throw new RuntimeException("Unsupported Map type: " + map.getClass().getTypeName()); + } tokenWriter.mapEpilogue(isComplete, size); } catch (Exception ex) { tokenWriter.mapEpilogue(isComplete, size); @@ -169,8 +190,12 @@ private void serializeCollection(Object value, Limits limits) throws Exception { int size = 0; try { col = (Collection) value; - size = col.size(); // /!\ alien call /!\ - isComplete = serializeCollection(col, limits); // /!\ contains alien calls /!\ + if (WellKnownClasses.isSizeSafe(col)) { + size = col.size(); // /!\ alien call /!\ + isComplete = serializeCollection(col, limits); // /!\ contains alien calls /!\ + } else { + throw new RuntimeException("Unsupported Collection type: " + col.getClass().getTypeName()); + } tokenWriter.collectionEpilogue(value, isComplete, size); } catch (Exception ex) { tokenWriter.collectionEpilogue(value, isComplete, size); @@ -270,6 +295,9 @@ private void onField(Field field, Object value, Limits limits) throws Exception } else { typeName = value != null ? value.getClass().getTypeName() : field.getType().getTypeName(); } + if (Redaction.isRedactedKeyword(field.getName())) { + value = REDACTED_VALUE; + } serialize( value instanceof CapturedContext.CapturedValue ? ((CapturedContext.CapturedValue) value).getValue() @@ -414,7 +442,12 @@ private boolean serializeMapEntries(Set> entries, Limi Map.Entry entry = (Map.Entry) it.next(); // /!\ alien call /!\ tokenWriter.mapEntryPrologue(entry); Object keyObj = entry.getKey(); // /!\ alien call /!\ - Object valObj = entry.getValue(); // /!\ alien call /!\ + Object valObj; + if (keyObj instanceof String && Redaction.isRedactedKeyword((String) keyObj)) { + valObj = REDACTED_VALUE; + } else { + valObj = entry.getValue(); // /!\ alien call /!\ + } serialize( keyObj, keyObj != null ? 
keyObj.getClass().getTypeName() : Object.class.getTypeName(), diff --git a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/StringTokenWriter.java b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/StringTokenWriter.java index 54528ec9ddd..fcd89a87dc1 100644 --- a/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/StringTokenWriter.java +++ b/dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/util/StringTokenWriter.java @@ -1,5 +1,8 @@ package com.datadog.debugger.util; +import static com.datadog.debugger.util.MoshiSnapshotHelper.REDACTED_IDENT_REASON; +import static com.datadog.debugger.util.MoshiSnapshotHelper.REDACTED_TYPE_REASON; + import com.datadog.debugger.el.Value; import datadog.trace.bootstrap.debugger.EvaluationError; import java.lang.reflect.Field; @@ -155,6 +158,12 @@ public void notCaptured(SerializerWithLimits.NotCapturedReason reason) { case FIELD_COUNT: sb.append(", ..."); break; + case REDACTED_IDENT: + sb.append("{").append(REDACTED_IDENT_REASON).append("}"); + break; + case REDACTED_TYPE: + sb.append("{").append(REDACTED_TYPE_REASON).append("}"); + break; default: throw new RuntimeException("Unsupported NotCapturedReason: " + reason); } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/CapturedSnapshotTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/CapturedSnapshotTest.java index fed893eb71f..1b8ec443bce 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/CapturedSnapshotTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/CapturedSnapshotTest.java @@ -1,15 +1,23 @@ package com.datadog.debugger.agent; +import static com.datadog.debugger.util.LogProbeTestHelper.parseTemplate; import static com.datadog.debugger.util.MoshiSnapshotHelper.DEPTH_REASON; import static com.datadog.debugger.util.MoshiSnapshotHelper.FIELD_COUNT_REASON; import static com.datadog.debugger.util.MoshiSnapshotHelper.NOT_CAPTURED_REASON; +import static com.datadog.debugger.util.MoshiSnapshotHelper.REDACTED_IDENT_REASON; +import static com.datadog.debugger.util.MoshiSnapshotHelper.REDACTED_TYPE_REASON; import static com.datadog.debugger.util.TestHelper.setFieldInConfig; +import static datadog.trace.bootstrap.debugger.util.Redaction.REDACTED_VALUE; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static utils.InstrumentationTestHelper.compile; import static utils.InstrumentationTestHelper.compileAndLoadClass; @@ -18,15 +26,22 @@ import com.datadog.debugger.el.DSL; import com.datadog.debugger.el.ProbeCondition; +import com.datadog.debugger.el.values.StringValue; +import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.probe.LogProbe; +import com.datadog.debugger.probe.ProbeDefinition; +import com.datadog.debugger.sink.DebuggerSink; +import com.datadog.debugger.sink.ProbeStatusSink; import com.datadog.debugger.sink.Snapshot; import com.datadog.debugger.util.MoshiHelper; import 
com.datadog.debugger.util.MoshiSnapshotTestHelper; import com.datadog.debugger.util.SerializerWithLimits; import com.squareup.moshi.JsonAdapter; import datadog.trace.api.Config; +import datadog.trace.api.sampling.Sampler; import datadog.trace.bootstrap.debugger.*; import datadog.trace.bootstrap.debugger.el.ValueReferences; +import datadog.trace.bootstrap.debugger.util.Redaction; import groovy.lang.GroovyClassLoader; import java.io.File; import java.io.IOException; @@ -64,6 +79,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.ArgumentCaptor; import utils.SourceCompiler; public class CapturedSnapshotTest { @@ -71,6 +87,7 @@ public class CapturedSnapshotTest { private static final ProbeId PROBE_ID = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f8", 0); private static final ProbeId PROBE_ID1 = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f6", 0); private static final ProbeId PROBE_ID2 = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f7", 0); + private static final ProbeId PROBE_ID3 = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f8", 0); private static final String SERVICE_NAME = "service-name"; private static final JsonAdapter VALUE_ADAPTER = new MoshiSnapshotTestHelper.CapturedValueAdapter(); @@ -79,6 +96,8 @@ public class CapturedSnapshotTest { private Instrumentation instr = ByteBuddyAgent.install(); private ClassFileTransformer currentTransformer; + private ProbeStatusSink probeStatusSink; + private MockInstrumentationListener instrumentationListener; @BeforeEach public void before() { @@ -92,6 +111,7 @@ public void after() { } ProbeRateLimiter.resetAll(); Assertions.assertFalse(DebuggerContext.isInProbe()); + Redaction.clearUserDefinedTypes(); } @Test @@ -101,10 +121,9 @@ public void methodNotFound() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, "foobar", null); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); - Assertions.assertEquals( - "Cannot find method CapturedSnapshot01::foobar", - listener.errors.get(PROBE_ID.getId()).get(0).getMessage()); + assertEquals(2, result); + verify(probeStatusSink) + .addError(eq(PROBE_ID), eq("Cannot find method CapturedSnapshot01::foobar")); } @Test @@ -114,15 +133,15 @@ public void methodProbe() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, "main", "int (java.lang.String)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); + assertEquals(3, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNotNull(snapshot.getCaptures().getEntry()); - Assertions.assertNotNull(snapshot.getCaptures().getReturn()); + assertNotNull(snapshot.getCaptures().getEntry()); + assertNotNull(snapshot.getCaptures().getReturn()); assertCaptureArgs(snapshot.getCaptures().getEntry(), "arg", "java.lang.String", "1"); assertCaptureArgs(snapshot.getCaptures().getReturn(), "arg", "java.lang.String", "1"); assertTrue(snapshot.getDuration() > 0); assertTrue(snapshot.getStack().size() > 0); - Assertions.assertEquals("CapturedSnapshot01.main", snapshot.getStack().get(0).getFunction()); + assertEquals("CapturedSnapshot01.main", snapshot.getStack().get(0).getFunction()); } @Test @@ -132,15 +151,15 @@ public void singleLineProbe() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, 
"main", "int (java.lang.String)", "8"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); + assertEquals(3, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); assertCaptureArgs(snapshot.getCaptures().getLines().get(8), "arg", "java.lang.String", "1"); assertCaptureLocals(snapshot.getCaptures().getLines().get(8), "var1", "int", "1"); assertTrue(snapshot.getStack().size() > 0); - Assertions.assertEquals("CapturedSnapshot01.java", snapshot.getStack().get(0).getFileName()); + assertEquals("CapturedSnapshot01.java", snapshot.getStack().get(0).getFileName()); } @Test @@ -152,8 +171,8 @@ public void resolutionFails() throws IOException, URISyntaxException { DebuggerContext.init((id, clazz) -> null, null); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); - Assertions.assertEquals(0, listener.snapshots.size()); + assertEquals(3, result); + assertEquals(0, listener.snapshots.size()); } @Test @@ -171,8 +190,8 @@ public void resolutionThrows() throws IOException, URISyntaxException { null); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); - Assertions.assertEquals(0, listener.snapshots.size()); + assertEquals(3, result); + assertEquals(0, listener.snapshots.size()); } @Test @@ -182,7 +201,7 @@ public void constructor() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, "", "(String, Object)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "f").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); assertOneSnapshot(listener); } @@ -193,7 +212,7 @@ public void overloadedConstructor() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, "", "()"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "f").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); assertOneSnapshot(listener); } @@ -203,11 +222,23 @@ public void veryOldClassFile() throws Exception { DebuggerTransformerTest.TestSnapshotListener listener = installSingleProbe(CLASS_NAME, "", "()"); Class testClass = Class.forName(CLASS_NAME); - Assertions.assertNotNull(testClass); + assertNotNull(testClass); testClass.newInstance(); assertOneSnapshot(listener); } + @Test + public void oldJavacBug() throws Exception { + final String CLASS_NAME = "com.datadog.debugger.classfiles.JavacBug"; // compiled with jdk 1.6 + DebuggerTransformerTest.TestSnapshotListener listener = + installSingleProbe(CLASS_NAME, "main", null); + Class testClass = Class.forName(CLASS_NAME); + assertNotNull(testClass); + int result = Reflect.on(testClass).call("main", "").get(); + assertEquals(45, result); + assertEquals(0, listener.snapshots.size()); + } + @Test public void nestedConstructor() throws Exception { final String CLASS_NAME = "CapturedSnapshot02"; @@ -215,7 +246,7 @@ public void nestedConstructor() throws Exception { installSingleProbe(CLASS_NAME, "", "(Throwable)"); Class testClass = 
compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "init").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); assertOneSnapshot(listener); } @@ -226,7 +257,7 @@ public void nestedConstructor2() throws Exception { installSingleProbe(CLASS_NAME, "", "(int)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); } @@ -237,7 +268,7 @@ public void nestedConstructor3() throws Exception { installSingleProbe(CLASS_NAME, "", "(int, int, int)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); } @@ -248,14 +279,14 @@ public void inheritedConstructor() throws Exception { installSingleProbe(CLASS_NAME + "$Inherited", "", null); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); assertCaptureFields( snapshot.getCaptures().getEntry(), "obj2", "java.lang.Object", (String) null); CapturedContext.CapturedValue obj2 = snapshot.getCaptures().getReturn().getFields().get("obj2"); Map fields = getFields(obj2); - Assertions.assertEquals(24, fields.get("intValue").getValue()); - Assertions.assertEquals(3.14, fields.get("doubleValue").getValue()); + assertEquals(24, fields.get("intValue").getValue()); + assertEquals(3.14, fields.get("doubleValue").getValue()); } @Test @@ -268,7 +299,7 @@ public void largeStackInheritedConstructor() throws Exception { createProbe(PROBE_ID2, CLASS_NAME, "", "(String, long, String)")); Class testClass = compileAndLoadClass(CLASS_NAME); long result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(4_000_000_001L, result); + assertEquals(4_000_000_001L, result); assertSnapshots(listener, 2, PROBE_ID2, PROBE_ID1); } @@ -282,7 +313,7 @@ public void multiMethods() throws IOException, URISyntaxException { createProbe(PROBE_ID2, CLASS_NAME, "f2", "(int)")); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); List snapshots = assertSnapshots(listener, 2, PROBE_ID1, PROBE_ID2); Snapshot snapshot0 = snapshots.get(0); assertCaptureArgs(snapshot0.getCaptures().getEntry(), "value", "int", "31"); @@ -301,7 +332,7 @@ public void multiProbeSameMethod() throws IOException, URISyntaxException { installProbes(CLASS_NAME, probe, probe2); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); List snapshots = assertSnapshots(listener, 2, PROBE_ID1, PROBE_ID2); Snapshot snapshot0 = snapshots.get(0); assertCaptureArgs(snapshot0.getCaptures().getEntry(), "value", "int", "31"); @@ -315,9 +346,9 @@ private List assertSnapshots( DebuggerTransformerTest.TestSnapshotListener listener, int expectedCount, ProbeId... 
probeIds) { - Assertions.assertEquals(expectedCount, listener.snapshots.size()); + assertEquals(expectedCount, listener.snapshots.size()); for (int i = 0; i < probeIds.length; i++) { - Assertions.assertEquals(probeIds[i].getId(), listener.snapshots.get(i).getProbe().getId()); + assertEquals(probeIds[i].getId(), listener.snapshots.get(i).getProbe().getId()); } return listener.snapshots; } @@ -329,7 +360,7 @@ public void catchBlock() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, "f", "()")); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "f").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); assertOneSnapshot(listener); } @@ -346,7 +377,7 @@ public void insideSynchronizedBlock() throws IOException, URISyntaxException { PROBE_ID, CLASS_NAME, "synchronizedBlock", "(int)", LINE_START + "-" + LINE_END)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "synchronizedBlock").get(); - Assertions.assertEquals(76, result); + assertEquals(76, result); List snapshots = assertSnapshots(listener, 10); int count = 31; for (int i = 0; i < 10; i++) { @@ -374,10 +405,10 @@ public void outsideSynchronizedBlock() throws IOException, URISyntaxException { PROBE_ID, CLASS_NAME, "synchronizedBlock", "(int)", LINE_START + "-" + LINE_END)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "synchronizedBlock").get(); - Assertions.assertEquals(76, result); + assertEquals(76, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(2, snapshot.getCaptures().getLines().size()); assertCaptureLocals(snapshot.getCaptures().getLines().get(LINE_START), "count", "int", "31"); assertCaptureLocals(snapshot.getCaptures().getLines().get(LINE_END), "count", "int", "76"); @@ -390,10 +421,10 @@ public void sourceFileProbe() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createSourceFileProbe(PROBE_ID, CLASS_NAME + ".java", 4)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals(CLASS_NAME, snapshot.getProbe().getLocation().getType()); Assertions.assertEquals("f1", snapshot.getProbe().getLocation().getMethod()); @@ -407,10 +438,10 @@ public void simpleSourceFileProbe() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createSourceFileProbe(PROBE_ID, "CapturedSnapshot10.java", 11)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); + assertEquals(2, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + 
assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals(CLASS_NAME, snapshot.getProbe().getLocation().getType()); Assertions.assertEquals("main", snapshot.getProbe().getLocation().getMethod()); @@ -427,10 +458,10 @@ public void sourceFileProbeFullPath() throws IOException, URISyntaxException { createSourceFileProbe(PROBE_ID, "src/main/java/" + DIR_CLASS_NAME + ".java", 11)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); + assertEquals(2, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals(CLASS_NAME, snapshot.getProbe().getLocation().getType()); Assertions.assertEquals("main", snapshot.getProbe().getLocation().getMethod()); @@ -447,10 +478,10 @@ public void sourceFileProbeFullPathTopLevelClass() throws IOException, URISyntax createSourceFileProbe(PROBE_ID, "src/main/java/" + DIR_CLASS_NAME + ".java", 21)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(42 * 42, result); + assertEquals(42 * 42, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals( "com.datadog.debugger.TopLevel01", snapshot.getProbe().getLocation().getType()); @@ -469,21 +500,21 @@ public void methodProbeLineProbeMix() throws IOException, URISyntaxException { createProbe(PROBE_ID2, CLASS_NAME, "main", null)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); + assertEquals(2, result); List snapshots = assertSnapshots(listener, 2, PROBE_ID1, PROBE_ID2); Snapshot snapshot0 = snapshots.get(0); - Assertions.assertNull(snapshot0.getCaptures().getEntry()); - Assertions.assertNull(snapshot0.getCaptures().getReturn()); + assertNull(snapshot0.getCaptures().getEntry()); + assertNull(snapshot0.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot0.getCaptures().getLines().size()); Assertions.assertEquals( "com.datadog.debugger.CapturedSnapshot11", snapshot0.getProbe().getLocation().getType()); - Assertions.assertEquals("main", snapshot0.getProbe().getLocation().getMethod()); + assertEquals("main", snapshot0.getProbe().getLocation().getMethod()); assertCaptureArgs(snapshot0.getCaptures().getLines().get(10), "arg", "java.lang.String", "2"); assertCaptureLocals(snapshot0.getCaptures().getLines().get(10), "var1", "int", "1"); Snapshot snapshot1 = snapshots.get(1); - Assertions.assertEquals( + assertEquals( "com.datadog.debugger.CapturedSnapshot11", snapshot1.getProbe().getLocation().getType()); - Assertions.assertEquals("main", snapshot1.getProbe().getLocation().getMethod()); + assertEquals("main", snapshot1.getProbe().getLocation().getMethod()); 
assertCaptureArgs(snapshot1.getCaptures().getEntry(), "arg", "java.lang.String", "2"); assertCaptureReturnValue(snapshot1.getCaptures().getReturn(), "int", "2"); } @@ -497,10 +528,10 @@ public void sourceFileProbeScala() throws IOException, URISyntaxException { String source = getFixtureContent("/" + FILE_NAME); Class testClass = ScalaHelper.compileAndLoad(source, CLASS_NAME, FILE_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals(CLASS_NAME, snapshot.getProbe().getLocation().getType()); Assertions.assertEquals("f1", snapshot.getProbe().getLocation().getMethod()); @@ -516,10 +547,10 @@ public void sourceFileProbeGroovy() throws IOException, URISyntaxException { GroovyClassLoader groovyClassLoader = new GroovyClassLoader(); Class testClass = groovyClassLoader.parseClass(source); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals(CLASS_NAME, snapshot.getProbe().getLocation().getType()); Assertions.assertEquals("f1", snapshot.getProbe().getLocation().getMethod()); @@ -532,16 +563,16 @@ public void sourceFileProbeKotlin() { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, createSourceFileProbe(PROBE_ID, CLASS_NAME + ".kt", 4)); URL resource = CapturedSnapshotTest.class.getResource("/" + CLASS_NAME + ".kt"); - Assertions.assertNotNull(resource); + assertNotNull(resource); List filesToDelete = new ArrayList<>(); Class testClass = KotlinHelper.compileAndLoad(CLASS_NAME, resource.getFile(), filesToDelete); try { Object companion = Reflect.on(testClass).get("Companion"); int result = Reflect.on(companion).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); Snapshot snapshot = assertOneSnapshot(listener); - Assertions.assertNull(snapshot.getCaptures().getEntry()); - Assertions.assertNull(snapshot.getCaptures().getReturn()); + assertNull(snapshot.getCaptures().getEntry()); + assertNull(snapshot.getCaptures().getReturn()); Assertions.assertEquals(1, snapshot.getCaptures().getLines().size()); Assertions.assertEquals(CLASS_NAME, snapshot.getProbe().getLocation().getType()); Assertions.assertEquals("f1", snapshot.getProbe().getLocation().getMethod()); @@ -562,7 +593,7 @@ public void fieldExtractor() throws IOException, URISyntaxException { installProbes(CLASS_NAME, simpleDataProbe, compositeDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); List snapshots = assertSnapshots(listener, 2, PROBE_ID1, PROBE_ID2); Snapshot simpleSnapshot = snapshots.get(0); Map expectedSimpleFields = new HashMap<>(); @@ -594,7 +625,7 @@ public void 
fieldExtractorDeep2() throws IOException, URISyntaxException { installProbes(CLASS_NAME, compositeDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext.CapturedValue returnValue = snapshot.getCaptures().getReturn().getLocals().get("@return"); @@ -604,11 +635,10 @@ public void fieldExtractorDeep2() throws IOException, URISyntaxException { CapturedContext.CapturedValue s1 = fields.get("s1"); Map s1Fields = (Map) s1.getValue(); - Assertions.assertEquals("101", String.valueOf(s1Fields.get("intValue").getValue())); - Assertions.assertEquals("foo1", s1Fields.get("strValue").getValue()); - Assertions.assertEquals("null", String.valueOf(s1Fields.get("listValue").getValue())); - Assertions.assertEquals( - DEPTH_REASON, String.valueOf(s1Fields.get("listValue").getNotCapturedReason())); + assertEquals("101", String.valueOf(s1Fields.get("intValue").getValue())); + assertEquals("foo1", s1Fields.get("strValue").getValue()); + assertEquals("null", String.valueOf(s1Fields.get("listValue").getValue())); + assertEquals(DEPTH_REASON, String.valueOf(s1Fields.get("listValue").getNotCapturedReason())); } @Test @@ -620,7 +650,7 @@ public void fieldExtractorLength() throws IOException, URISyntaxException { installProbes(CLASS_NAME, simpleDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); Snapshot snapshot = assertOneSnapshot(listener); Map expectedFields = new HashMap<>(); expectedFields.put("intValue", "42"); @@ -639,14 +669,13 @@ public void fieldExtractorDisabled() throws IOException, URISyntaxException { installProbes(CLASS_NAME, simpleDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext.CapturedValue simpleData = snapshot.getCaptures().getReturn().getLocals().get("simpleData"); Map fields = getFields(simpleData); - Assertions.assertEquals(1, fields.size()); - Assertions.assertEquals( - DEPTH_REASON, fields.get("@" + NOT_CAPTURED_REASON).getNotCapturedReason()); + assertEquals(1, fields.size()); + assertEquals(DEPTH_REASON, fields.get("@" + NOT_CAPTURED_REASON).getNotCapturedReason()); } @Test @@ -658,13 +687,13 @@ public void fieldExtractorDepth0() throws IOException, URISyntaxException { installProbes(CLASS_NAME, simpleDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext.CapturedValue simpleData = snapshot.getCaptures().getReturn().getLocals().get("simpleData"); Map simpleDataFields = getFields(simpleData); - Assertions.assertEquals(1, simpleDataFields.size()); - Assertions.assertEquals( + assertEquals(1, simpleDataFields.size()); + assertEquals( DEPTH_REASON, simpleDataFields.get("@" + NOT_CAPTURED_REASON).getNotCapturedReason()); } @@ -677,15 +706,15 @@ public void fieldExtractorDepth1() throws IOException, URISyntaxException { installProbes(CLASS_NAME, simpleDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", 
"").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext.CapturedValue simpleData = snapshot.getCaptures().getReturn().getLocals().get("simpleData"); Map simpleDataFields = getFields(simpleData); - Assertions.assertEquals(4, simpleDataFields.size()); - Assertions.assertEquals("foo", simpleDataFields.get("strValue").getValue()); - Assertions.assertEquals(42, simpleDataFields.get("intValue").getValue()); - Assertions.assertEquals(DEPTH_REASON, simpleDataFields.get("listValue").getNotCapturedReason()); + assertEquals(4, simpleDataFields.size()); + assertEquals("foo", simpleDataFields.get("strValue").getValue()); + assertEquals(42, simpleDataFields.get("intValue").getValue()); + assertEquals(DEPTH_REASON, simpleDataFields.get("listValue").getNotCapturedReason()); } @Test @@ -698,29 +727,28 @@ public void fieldExtractorCount2() throws IOException, URISyntaxException { installProbes(CLASS_NAME, compositeDataProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(143, result); + assertEquals(143, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext.CapturedValue returnValue = snapshot.getCaptures().getReturn().getLocals().get("@return"); - Assertions.assertEquals("CapturedSnapshot04$CompositeData", returnValue.getType()); + assertEquals("CapturedSnapshot04$CompositeData", returnValue.getType()); Map fields = getFields(returnValue); - Assertions.assertEquals(3, fields.size()); - Assertions.assertEquals( - FIELD_COUNT_REASON, fields.get("@" + NOT_CAPTURED_REASON).getNotCapturedReason()); + assertEquals(3, fields.size()); + assertEquals(FIELD_COUNT_REASON, fields.get("@" + NOT_CAPTURED_REASON).getNotCapturedReason()); Map s1Fields = (Map) fields.get("s1").getValue(); - Assertions.assertEquals("foo1", s1Fields.get("strValue").getValue()); - Assertions.assertEquals(101, s1Fields.get("intValue").getValue()); + assertEquals("foo1", s1Fields.get("strValue").getValue()); + assertEquals(101, s1Fields.get("intValue").getValue()); Map s2Fields = (Map) fields.get("s2").getValue(); - Assertions.assertEquals("foo2", s2Fields.get("strValue").getValue()); - Assertions.assertEquals(202, s2Fields.get("intValue").getValue()); + assertEquals("foo2", s2Fields.get("strValue").getValue()); + assertEquals(202, s2Fields.get("intValue").getValue()); CapturedContext.CapturedValue compositeData = snapshot.getCaptures().getReturn().getLocals().get("compositeData"); Map compositeDataFields = getFields(compositeData); - Assertions.assertEquals(3, compositeDataFields.size()); - Assertions.assertEquals( + assertEquals(3, compositeDataFields.size()); + assertEquals( FIELD_COUNT_REASON, compositeDataFields.get("@" + NOT_CAPTURED_REASON).getNotCapturedReason()); assertTrue(compositeDataFields.containsKey("s1")); @@ -738,7 +766,7 @@ public void uncaughtException() throws IOException, URISyntaxException { Reflect.on(testClass).call("main", "triggerUncaughtException").get(); Assertions.fail("should not reach this code"); } catch (ReflectException ex) { - Assertions.assertEquals("oops", ex.getCause().getCause().getMessage()); + assertEquals("oops", ex.getCause().getCause().getMessage()); } Snapshot snapshot = assertOneSnapshot(listener); assertCaptureThrowable( @@ -757,7 +785,7 @@ public void caughtException() throws IOException, URISyntaxException { CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, "triggerCaughtException", "()")); Class 
testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "triggerCaughtException").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); assertCaptureThrowable( snapshot.getCaptures().getCaughtExceptions().get(0), @@ -781,7 +809,7 @@ public void rateLimitSnapshot() throws IOException, URISyntaxException { Class testClass = compileAndLoadClass(CLASS_NAME); for (int i = 0; i < 100; i++) { int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); + assertEquals(3, result); } assertTrue(listener.snapshots.size() < 20); } @@ -801,7 +829,7 @@ public void globalRateLimitSnapshot() throws IOException, URISyntaxException { Class testClass = compileAndLoadClass(CLASS_NAME); for (int i = 0; i < 100; i++) { int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); } assertTrue(listener.snapshots.size() < 20, "actual snapshots: " + listener.snapshots.size()); } @@ -838,7 +866,7 @@ public void simpleConditionTest() throws IOException, URISyntaxException { int result = Reflect.on(testClass).call("main", String.valueOf(i)).get(); assertTrue((i == 2 && result == 2) || result == 3); } - Assertions.assertEquals(1, listener.snapshots.size()); + assertEquals(1, listener.snapshots.size()); assertCaptureArgs( listener.snapshots.get(0).getCaptures().getReturn(), "arg", "java.lang.String", "5"); } @@ -856,7 +884,7 @@ public void staticFieldCondition() throws IOException, URISyntaxException { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "0").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); Map staticFields = snapshot.getCaptures().getReturn().getStaticFields(); @@ -879,8 +907,8 @@ public void simpleFalseConditionTest() throws IOException, URISyntaxException { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "0").get(); - Assertions.assertEquals(3, result); - Assertions.assertEquals(0, listener.snapshots.size()); + assertEquals(3, result); + assertEquals(0, listener.snapshots.size()); } @Test @@ -901,10 +929,10 @@ public void nullCondition() throws IOException, URISyntaxException { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbes); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(1, listener.snapshots.size()); + assertEquals(1, listener.snapshots.size()); List evaluationErrors = listener.snapshots.get(0).getEvaluationErrors(); Assertions.assertEquals(1, evaluationErrors.size()); - Assertions.assertEquals("fld", evaluationErrors.get(0).getExpr()); + Assertions.assertEquals("nullTyped.fld.fld", evaluationErrors.get(0).getExpr()); Assertions.assertEquals( "Cannot dereference to field: fld", evaluationErrors.get(0).getMessage()); } @@ -963,8 +991,8 @@ private List doMergedProbeConditions( installProbes(CLASS_NAME, probe1, probe2); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); - Assertions.assertEquals(expectedSnapshots, 
listener.snapshots.size()); + assertEquals(3, result); + assertEquals(expectedSnapshots, listener.snapshots.size()); return listener.snapshots; } @@ -983,7 +1011,7 @@ public void mergedProbesConditionMainErrorAdditionalFalse() List snapshots = doMergedProbeConditions(condition1, condition2, 1); List evaluationErrors = snapshots.get(0).getEvaluationErrors(); Assertions.assertEquals(1, evaluationErrors.size()); - Assertions.assertEquals("fld", evaluationErrors.get(0).getExpr()); + Assertions.assertEquals("nullTyped.fld.fld", evaluationErrors.get(0).getExpr()); Assertions.assertEquals( "Cannot dereference to field: fld", evaluationErrors.get(0).getMessage()); } @@ -1003,10 +1031,10 @@ public void mergedProbesConditionMainErrorAdditionalTrue() List snapshots = doMergedProbeConditions(condition1, condition2, 2); List evaluationErrors = snapshots.get(0).getEvaluationErrors(); Assertions.assertEquals(1, evaluationErrors.size()); - Assertions.assertEquals("fld", evaluationErrors.get(0).getExpr()); + Assertions.assertEquals("nullTyped.fld.fld", evaluationErrors.get(0).getExpr()); Assertions.assertEquals( "Cannot dereference to field: fld", evaluationErrors.get(0).getMessage()); - Assertions.assertNull(snapshots.get(1).getEvaluationErrors()); + assertNull(snapshots.get(1).getEvaluationErrors()); } @Test @@ -1024,7 +1052,7 @@ public void mergedProbesConditionMainFalseAdditionalError() List snapshots = doMergedProbeConditions(condition1, condition2, 1); List evaluationErrors = snapshots.get(0).getEvaluationErrors(); Assertions.assertEquals(1, evaluationErrors.size()); - Assertions.assertEquals("fld", evaluationErrors.get(0).getExpr()); + Assertions.assertEquals("nullTyped.fld.fld", evaluationErrors.get(0).getExpr()); Assertions.assertEquals( "Cannot dereference to field: fld", evaluationErrors.get(0).getMessage()); } @@ -1042,10 +1070,10 @@ public void mergedProbesConditionMainTrueAdditionalError() DSL.value("hello"))), "nullTyped.fld.fld.msg == 'hello'"); List snapshots = doMergedProbeConditions(condition1, condition2, 2); - Assertions.assertNull(snapshots.get(0).getEvaluationErrors()); + assertNull(snapshots.get(0).getEvaluationErrors()); List evaluationErrors = snapshots.get(1).getEvaluationErrors(); Assertions.assertEquals(1, evaluationErrors.size()); - Assertions.assertEquals("fld", evaluationErrors.get(0).getExpr()); + Assertions.assertEquals("nullTyped.fld.fld", evaluationErrors.get(0).getExpr()); Assertions.assertEquals( "Cannot dereference to field: fld", evaluationErrors.get(0).getMessage()); } @@ -1071,8 +1099,8 @@ public void mergedProbesConditionMixedLocation() throws IOException, URISyntaxEx int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(3, result); Assertions.assertEquals(2, listener.snapshots.size()); - Assertions.assertNull(listener.snapshots.get(0).getEvaluationErrors()); - Assertions.assertNull(listener.snapshots.get(1).getEvaluationErrors()); + assertNull(listener.snapshots.get(0).getEvaluationErrors()); + assertNull(listener.snapshots.get(1).getEvaluationErrors()); } @Test @@ -1082,7 +1110,7 @@ public void fields() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, "f", "()")); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "f").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); assertCaptureFieldCount(snapshot.getCaptures().getEntry(), 5); 
assertCaptureFields(snapshot.getCaptures().getEntry(), "intValue", "int", "24"); @@ -1123,7 +1151,7 @@ public void inheritedFields() throws IOException, URISyntaxException { installProbes(INHERITED_CLASS_NAME, probe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "inherited").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); // Only Declared fields in the current class are captured, not inherited fields assertCaptureFieldCount(snapshot.getCaptures().getEntry(), 5); @@ -1143,7 +1171,7 @@ public void staticFields() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, "", "()"); Class testClass = compileAndLoadClass(CLASS_NAME); long result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(4_000_000_001L, result); + assertEquals(4_000_000_001L, result); Snapshot snapshot = assertOneSnapshot(listener); Map staticFields = snapshot.getCaptures().getEntry().getStaticFields(); @@ -1168,7 +1196,7 @@ public void staticInheritedFields() throws IOException, URISyntaxException { installProbes(INHERITED_CLASS_NAME, logProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "inherited").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); Map staticFields = snapshot.getCaptures().getReturn().getStaticFields(); @@ -1188,10 +1216,10 @@ public void staticLambda() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, null, null, "33")); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "static", "email@address").get(); - Assertions.assertEquals(8, result); + assertEquals(8, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext context = snapshot.getCaptures().getLines().get(33); - Assertions.assertNotNull(context); + assertNotNull(context); assertCaptureLocals(context, "idx", "int", "5"); } @@ -1205,10 +1233,10 @@ public void capturingLambda() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, null, null, "44")); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "capturing", "email@address").get(); - Assertions.assertEquals(8, result); + assertEquals(8, result); Snapshot snapshot = assertOneSnapshot(listener); CapturedContext context = snapshot.getCaptures().getLines().get(44); - Assertions.assertNotNull(context); + assertNotNull(context); assertCaptureLocals(context, "idx", "int", "5"); assertCaptureFields(context, "strValue", "java.lang.String", "email@address"); } @@ -1233,7 +1261,7 @@ public void tracerInstrumentedClass() throws Exception { // it's important there is no null key in this map, as Jackson is not happy about it // it's means here that argument names are not resolved correctly Assertions.assertFalse(arguments.containsKey(null)); - Assertions.assertEquals(4, arguments.size()); + assertEquals(4, arguments.size()); assertTrue(arguments.containsKey("this")); assertTrue(arguments.containsKey("apiKey")); assertTrue(arguments.containsKey("uriInfo")); @@ -1249,13 +1277,14 @@ public void noCodeMethods() throws Exception { installProbes(CLASS_NAME, nativeMethodProbe, abstractMethodProbe); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - 
Assertions.assertEquals(1, result); - Assertions.assertEquals( - "Cannot instrument an abstract or native method", - listener.errors.get(PROBE_ID1.getId()).get(0).getMessage()); - Assertions.assertEquals( - "Cannot instrument an abstract or native method", - listener.errors.get(PROBE_ID2.getId()).get(0).getMessage()); + assertEquals(1, result); + ArgumentCaptor probeIdCaptor = ArgumentCaptor.forClass(ProbeId.class); + ArgumentCaptor strCaptor = ArgumentCaptor.forClass(String.class); + verify(probeStatusSink, times(2)).addError(probeIdCaptor.capture(), strCaptor.capture()); + assertEquals(PROBE_ID1.getId(), probeIdCaptor.getAllValues().get(0).getId()); + assertEquals("Cannot instrument an abstract or native method", strCaptor.getAllValues().get(0)); + assertEquals(PROBE_ID2.getId(), probeIdCaptor.getAllValues().get(1).getId()); + assertEquals("Cannot instrument an abstract or native method", strCaptor.getAllValues().get(1)); } @Test @@ -1270,7 +1299,7 @@ public void duplicateClassDefinition() throws Exception { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, abstractMethodProbe); Class testClass = compileAndLoadClass(CLASS_NAME); - Assertions.assertNotNull(testClass); + assertNotNull(testClass); } @Test @@ -1280,7 +1309,7 @@ public void overloadedMethods() throws Exception { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, "overload", null)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(63, result); + assertEquals(63, result); List snapshots = assertSnapshots(listener, 4, PROBE_ID, PROBE_ID, PROBE_ID, PROBE_ID); assertCaptureReturnValue(snapshots.get(0).getCaptures().getReturn(), "int", "42"); assertCaptureArgs(snapshots.get(1).getCaptures().getEntry(), "s", "java.lang.String", "1"); @@ -1296,7 +1325,7 @@ public void noDebugInfoEmptyMethod() throws Exception { Map classFileBuffers = compile(CLASS_NAME, SourceCompiler.DebugInfo.NONE); Class testClass = loadClass(CLASS_NAME, classFileBuffers); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(48, result); + assertEquals(48, result); assertOneSnapshot(listener); } @@ -1313,8 +1342,8 @@ public void instrumentTheWorld() throws Exception { instr.removeTransformer(currentTransformer); } int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); - Assertions.assertEquals(1, listener.snapshots.size()); + assertEquals(2, result); + assertEquals(1, listener.snapshots.size()); ProbeImplementation probeImplementation = listener.snapshots.get(0).getProbe(); assertTrue(probeImplementation.isCaptureSnapshot()); assertEquals("main", probeImplementation.getLocation().getMethod()); @@ -1335,8 +1364,8 @@ public void instrumentTheWorld_excludeClass(String excludeFileName) throws Excep instr.removeTransformer(currentTransformer); } int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); - Assertions.assertEquals(0, listener.snapshots.size()); + assertEquals(2, result); + assertEquals(0, listener.snapshots.size()); } @Test @@ -1346,7 +1375,7 @@ public void objectDynamicType() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, "processWithArg", null)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(50, result); + assertEquals(50, result); Snapshot snapshot = 
assertOneSnapshot(listener); assertCaptureArgs(snapshot.getCaptures().getEntry(), "obj", "java.lang.Integer", "42"); assertCaptureFields( @@ -1362,7 +1391,7 @@ public void exceptionAsLocalVariable() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, null, null, "14")); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(42, result); + assertEquals(42, result); Snapshot snapshot = assertOneSnapshot(listener); Map expectedFields = new HashMap<>(); expectedFields.put("detailMessage", "For input string: \"a\""); @@ -1385,7 +1414,7 @@ public void evaluateAtEntry() throws IOException, URISyntaxException { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbes); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); + assertEquals(3, result); assertOneSnapshot(listener); } @@ -1402,7 +1431,7 @@ public void evaluateAtExit() throws IOException, URISyntaxException { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbes); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); + assertEquals(3, result); assertOneSnapshot(listener); } @@ -1419,10 +1448,10 @@ public void evaluateAtExitFalse() throws IOException, URISyntaxException { DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbes); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - Assertions.assertEquals(3, result); - Assertions.assertEquals(0, listener.snapshots.size()); + assertEquals(3, result); + assertEquals(0, listener.snapshots.size()); assertTrue(listener.skipped); - Assertions.assertEquals(DebuggerContext.SkipCause.CONDITION, listener.cause); + assertEquals(DebuggerContext.SkipCause.CONDITION, listener.cause); } @Test @@ -1439,7 +1468,7 @@ public void uncaughtExceptionConditionLocalVar() throws IOException, URISyntaxEx Reflect.on(testClass).call("main", "triggerUncaughtException").get(); Assertions.fail("should not reach this code"); } catch (ReflectException ex) { - Assertions.assertEquals("oops", ex.getCause().getCause().getMessage()); + assertEquals("oops", ex.getCause().getCause().getMessage()); } Snapshot snapshot = assertOneSnapshot(listener); assertCaptureThrowable( @@ -1448,10 +1477,9 @@ public void uncaughtExceptionConditionLocalVar() throws IOException, URISyntaxEx "oops", "CapturedSnapshot05.triggerUncaughtException", 7); - Assertions.assertEquals(2, snapshot.getEvaluationErrors().size()); - Assertions.assertEquals( - "Cannot find symbol: after", snapshot.getEvaluationErrors().get(0).getMessage()); - Assertions.assertEquals( + assertEquals(2, snapshot.getEvaluationErrors().size()); + assertEquals("Cannot find symbol: after", snapshot.getEvaluationErrors().get(0).getMessage()); + assertEquals( "java.lang.IllegalStateException: oops", snapshot.getEvaluationErrors().get(1).getMessage()); } @@ -1464,7 +1492,7 @@ public void enumConstructorArgs() throws IOException, URISyntaxException { installProbes(ENUM_CLASS, createProbe(PROBE_ID, ENUM_CLASS, "", null)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(2, result); + assertEquals(2, result); 
assertSnapshots(listener, 3, PROBE_ID); Map arguments = listener.snapshots.get(0).getCaptures().getEntry().getArguments(); @@ -1481,7 +1509,7 @@ public void enumValues() throws IOException, URISyntaxException { installProbes(CLASS_NAME, createProbe(PROBE_ID, CLASS_NAME, "convert", null)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "2").get(); - Assertions.assertEquals(2, result); + assertEquals(2, result); Snapshot snapshot = assertOneSnapshot(listener); assertCaptureReturnValue( snapshot.getCaptures().getReturn(), @@ -1497,7 +1525,7 @@ public void recursiveCapture() throws IOException, URISyntaxException { installProbes(INNER_CLASS, createProbe(PROBE_ID, INNER_CLASS, "size", null)); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "").get(); - Assertions.assertEquals(1, result); + assertEquals(1, result); } @Test @@ -1511,10 +1539,45 @@ public void recursiveCaptureException() throws IOException, URISyntaxException { Reflect.on(testClass).call("main", "exception").get(); Assertions.fail("should not reach this code"); } catch (ReflectException ex) { - Assertions.assertEquals("not supported", ex.getCause().getCause().getMessage()); + assertEquals("not supported", ex.getCause().getCause().getMessage()); } } + @Test + public void unknownCollectionCount() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot24"; + Snapshot snapshot = doUnknownCount(CLASS_NAME); + assertEquals( + "Unsupported Collection class: com.datadog.debugger.CapturedSnapshot24$Holder", + snapshot.getEvaluationErrors().get(0).getMessage()); + } + + @Test + public void unknownMapCount() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot26"; + Snapshot snapshot = doUnknownCount(CLASS_NAME); + assertEquals( + "Unsupported Map class: com.datadog.debugger.CapturedSnapshot26$Holder", + snapshot.getEvaluationErrors().get(0).getMessage()); + } + + private Snapshot doUnknownCount(String CLASS_NAME) throws IOException, URISyntaxException { + LogProbe logProbe = + createProbeBuilder(PROBE_ID, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when(DSL.ge(DSL.len(DSL.ref("holder")), DSL.value(0))), "len(holder) >= 0")) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, logProbe); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "").get(); + Assertions.assertEquals(1, result); + Snapshot snapshot = assertOneSnapshot(listener); + assertEquals(1, snapshot.getEvaluationErrors().size()); + assertEquals("len(holder)", snapshot.getEvaluationErrors().get(0).getExpr()); + return snapshot; + } + @Test public void beforeForLoopLineProbe() throws IOException, URISyntaxException { final String CLASS_NAME = "CapturedSnapshot02"; @@ -1522,11 +1585,283 @@ public void beforeForLoopLineProbe() throws IOException, URISyntaxException { installSingleProbe(CLASS_NAME, null, null, "46"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "synchronizedBlock").get(); - Assertions.assertEquals(76, result); + assertEquals(76, result); Snapshot snapshot = assertOneSnapshot(listener); assertCaptureLocals(snapshot.getCaptures().getLines().get(46), "count", "int", "31"); } + @Test + public void dupLineProbeSameTemplate() throws IOException, URISyntaxException { + final String CLASS_NAME = 
"CapturedSnapshot08"; + final String LOG_TEMPLATE = "msg1={typed.fld.fld.msg}"; + LogProbe probe1 = + createProbeBuilder(PROBE_ID1, CLASS_NAME, null, null, "39") + .template(LOG_TEMPLATE, parseTemplate(LOG_TEMPLATE)) + .build(); + LogProbe probe2 = + createProbeBuilder(PROBE_ID2, CLASS_NAME, null, null, "39") + .template(LOG_TEMPLATE, parseTemplate(LOG_TEMPLATE)) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = + installProbes(CLASS_NAME, probe1, probe2); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "1").get(); + assertEquals(3, result); + List snapshots = assertSnapshots(listener, 2, PROBE_ID1, PROBE_ID2); + for (Snapshot snapshot : snapshots) { + assertEquals("msg1=hello", snapshot.getMessage()); + } + } + + @Test + public void keywordRedaction() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot27"; + final String LOG_TEMPLATE = + "arg={arg} secret={secret} password={this.password} fromMap={strMap['password']}"; + LogProbe probe1 = + createProbeBuilder(PROBE_ID, CLASS_NAME, "doit", null) + .template(LOG_TEMPLATE, parseTemplate(LOG_TEMPLATE)) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, probe1); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + Assertions.assertEquals(42, result); + Snapshot snapshot = assertOneSnapshot(listener); + assertEquals( + "arg=secret123 secret={" + + REDACTED_VALUE + + "} password={" + + REDACTED_VALUE + + "} fromMap={" + + REDACTED_VALUE + + "}", + snapshot.getMessage()); + CapturedContext.CapturedValue secretLocalVar = + snapshot.getCaptures().getReturn().getLocals().get("secret"); + CapturedContext.CapturedValue secretValued = + VALUE_ADAPTER.fromJson(secretLocalVar.getStrValue()); + assertEquals(REDACTED_IDENT_REASON, secretValued.getNotCapturedReason()); + Map thisFields = + getFields(snapshot.getCaptures().getReturn().getArguments().get("this")); + CapturedContext.CapturedValue passwordField = thisFields.get("password"); + assertEquals(REDACTED_IDENT_REASON, passwordField.getNotCapturedReason()); + Map strMap = (Map) thisFields.get("strMap").getValue(); + assertNull(strMap.get("password")); + } + + @Test + public void keywordRedactionConditions() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot27"; + LogProbe probe1 = + createProbeBuilder(PROBE_ID1, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when( + DSL.contains( + DSL.getMember(DSL.ref("this"), "password"), new StringValue("123"))), + "contains(this.password, '123')")) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + LogProbe probe2 = + createProbeBuilder(PROBE_ID2, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when(DSL.eq(DSL.ref("password"), DSL.value("123"))), "password == '123'")) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + LogProbe probe3 = + createProbeBuilder(PROBE_ID3, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when( + DSL.eq( + DSL.index(DSL.ref("strMap"), DSL.value("password")), DSL.value("123"))), + "strMap['password'] == '123'")) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = + installProbes(CLASS_NAME, probe1, probe2, probe3); 
+ Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + Assertions.assertEquals(42, result); + List snapshots = assertSnapshots(listener, 3, PROBE_ID1, PROBE_ID2, PROBE_ID3); + assertEquals(1, snapshots.get(0).getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'this.password' was redacted", + snapshots.get(0).getEvaluationErrors().get(0).getMessage()); + assertEquals(1, snapshots.get(1).getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'password' was redacted", + snapshots.get(1).getEvaluationErrors().get(0).getMessage()); + assertEquals(1, snapshots.get(2).getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'strMap[\"password\"]' was redacted", + snapshots.get(2).getEvaluationErrors().get(0).getMessage()); + } + + @Test + public void typeRedactionBlockedProbe() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot27"; + Config config = mock(Config.class); + when(config.getDebuggerRedactedTypes()).thenReturn("com.datadog.debugger.CapturedSnapshot27"); + Redaction.addUserDefinedTypes(config); + LogProbe probe1 = + createProbeBuilder(PROBE_ID, CLASS_NAME, "doit", null) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, probe1); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + Assertions.assertEquals(42, result); + assertEquals(0, listener.snapshots.size()); + InstrumentationResult instrumentationResult = + instrumentationListener.results.get(PROBE_ID.getId()); + assertTrue(instrumentationResult.isBlocked()); + } + + @Test + public void typeRedactionSnapshot() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot27"; + final String LOG_TEMPLATE = + "arg={arg} credentials={creds} user={this.creds.user} code={creds.secretCode} dave={credMap['dave'].user}"; + Config config = mock(Config.class); + when(config.getDebuggerRedactedTypes()) + .thenReturn("com.datadog.debugger.CapturedSnapshot27$Creds"); + Redaction.addUserDefinedTypes(config); + LogProbe probe1 = + createProbeBuilder(PROBE_ID, CLASS_NAME, "doit", null) + .template(LOG_TEMPLATE, parseTemplate(LOG_TEMPLATE)) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = installProbes(CLASS_NAME, probe1); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + Assertions.assertEquals(42, result); + Snapshot snapshot = assertOneSnapshot(listener); + assertEquals( + "arg=secret123 credentials={" + + REDACTED_VALUE + + "} user={" + + REDACTED_VALUE + + "} code={" + + REDACTED_VALUE + + "} dave={" + + REDACTED_VALUE + + "}", + snapshot.getMessage()); + Map thisFields = + getFields(snapshot.getCaptures().getReturn().getArguments().get("this")); + CapturedContext.CapturedValue credsField = thisFields.get("creds"); + assertEquals(REDACTED_TYPE_REASON, credsField.getNotCapturedReason()); + Map credMap = (Map) thisFields.get("credMap").getValue(); + assertNull(credMap.get("dave")); + } + + @Test + public void typeRedactionCondition() throws IOException, URISyntaxException { + final String CLASS_NAME = 
"com.datadog.debugger.CapturedSnapshot27"; + Config config = mock(Config.class); + when(config.getDebuggerRedactedTypes()) + .thenReturn("com.datadog.debugger.CapturedSnapshot27$Creds"); + Redaction.addUserDefinedTypes(config); + LogProbe probe1 = + createProbeBuilder(PROBE_ID1, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when( + DSL.contains( + DSL.getMember(DSL.getMember(DSL.ref("this"), "creds"), "secretCode"), + new StringValue("123"))), + "contains(this.creds.secretCode, '123')")) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + LogProbe probe2 = + createProbeBuilder(PROBE_ID2, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when( + DSL.eq(DSL.getMember(DSL.ref("creds"), "secretCode"), DSL.value("123"))), + "creds.secretCode == '123'")) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + LogProbe probe3 = + createProbeBuilder(PROBE_ID3, CLASS_NAME, "doit", null) + .when( + new ProbeCondition( + DSL.when( + DSL.eq(DSL.index(DSL.ref("credMap"), DSL.value("dave")), DSL.value("123"))), + "credMap['dave'] == '123'")) + .captureSnapshot(true) + .evaluateAt(MethodLocation.EXIT) + .build(); + DebuggerTransformerTest.TestSnapshotListener listener = + installProbes(CLASS_NAME, probe1, probe2, probe3); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + Assertions.assertEquals(42, result); + List snapshots = assertSnapshots(listener, 3, PROBE_ID1, PROBE_ID2, PROBE_ID3); + assertEquals(1, snapshots.get(0).getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'this.creds' was redacted", + snapshots.get(0).getEvaluationErrors().get(0).getMessage()); + assertEquals(1, snapshots.get(1).getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'creds' was redacted", + snapshots.get(1).getEvaluationErrors().get(0).getMessage()); + assertEquals(1, snapshots.get(2).getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'credMap[\"dave\"]' was redacted", + snapshots.get(2).getEvaluationErrors().get(0).getMessage()); + } + + @Test + public void samplingMethodProbe() throws IOException, URISyntaxException { + doSamplingTest(this::methodProbe, 1, 1); + } + + @Test + public void samplingProbeCondition() throws IOException, URISyntaxException { + doSamplingTest(this::simpleConditionTest, 1, 1); + } + + @Test + public void samplingDupMethodProbeCondition() throws IOException, URISyntaxException { + doSamplingTest(this::mergedProbesWithAdditionalProbeConditionTest, 2, 2); + } + + @Test + public void samplingLineProbe() throws IOException, URISyntaxException { + doSamplingTest(this::singleLineProbe, 1, 1); + } + + interface TestMethod { + void run() throws IOException, URISyntaxException; + } + + private void doSamplingTest(TestMethod testRun, int expectedGlobalCount, int expectedProbeCount) + throws IOException, URISyntaxException { + MockSampler probeSampler = new MockSampler(); + MockSampler globalSampler = new MockSampler(); + ProbeRateLimiter.setSamplerSupplier(rate -> rate < 101 ? 
probeSampler : globalSampler); + ProbeRateLimiter.setGlobalSnapshotRate(1000); + try { + testRun.run(); + } finally { + ProbeRateLimiter.setSamplerSupplier(null); + } + assertEquals(expectedGlobalCount, globalSampler.callCount); + assertEquals(expectedProbeCount, probeSampler.callCount); + } + private DebuggerTransformerTest.TestSnapshotListener setupInstrumentTheWorldTransformer( String excludeFileName) { Config config = mock(Config.class); @@ -1534,11 +1869,16 @@ private DebuggerTransformerTest.TestSnapshotListener setupInstrumentTheWorldTran when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); when(config.isDebuggerInstrumentTheWorld()).thenReturn(true); when(config.getDebuggerExcludeFiles()).thenReturn(excludeFileName); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + when(config.getDebuggerUploadBatchSize()).thenReturn(100); DebuggerTransformerTest.TestSnapshotListener listener = new DebuggerTransformerTest.TestSnapshotListener(); DebuggerAgentHelper.injectSink(listener); currentTransformer = - DebuggerAgent.setupInstrumentTheWorldTransformer(config, instr, listener, null); + DebuggerAgent.setupInstrumentTheWorldTransformer( + config, instr, new DebuggerSink(config), null); DebuggerContext.initClassFilter(new DenyListHelper(null)); return listener; } @@ -1558,9 +1898,9 @@ private void setCorrelationSingleton(Object instance) { } private Snapshot assertOneSnapshot(DebuggerTransformerTest.TestSnapshotListener listener) { - Assertions.assertEquals(1, listener.snapshots.size()); + assertEquals(1, listener.snapshots.size()); Snapshot snapshot = listener.snapshots.get(0); - Assertions.assertEquals(PROBE_ID.getId(), snapshot.getProbe().getId()); + assertEquals(PROBE_ID.getId(), snapshot.getProbe().getId()); return snapshot; } @@ -1576,8 +1916,18 @@ private DebuggerTransformerTest.TestSnapshotListener installProbes( when(config.isDebuggerEnabled()).thenReturn(true); when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); when(config.isDebuggerVerifyByteCode()).thenReturn(true); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); Collection logProbes = configuration.getLogProbes(); - currentTransformer = new DebuggerTransformer(config, configuration, null); + instrumentationListener = new MockInstrumentationListener(); + probeStatusSink = mock(ProbeStatusSink.class); + currentTransformer = + new DebuggerTransformer( + config, + configuration, + instrumentationListener, + new DebuggerSink(config, probeStatusSink)); instr.addTransformer(currentTransformer); DebuggerTransformerTest.TestSnapshotListener listener = new DebuggerTransformerTest.TestSnapshotListener(); @@ -1600,7 +1950,7 @@ private DebuggerTransformerTest.TestSnapshotListener installProbes( private ProbeImplementation resolver( String id, Class callingClass, String expectedClassName, Collection logProbes) { - Assertions.assertEquals(expectedClassName, callingClass.getName()); + assertEquals(expectedClassName, callingClass.getName()); for (LogProbe probe : logProbes) { if (probe.getId().equals(id)) { return probe; @@ -1622,30 +1972,29 @@ private DebuggerTransformerTest.TestSnapshotListener installProbes( private void assertCaptureArgs( CapturedContext context, String name, String typeName, String value) { 
CapturedContext.CapturedValue capturedValue = context.getArguments().get(name); - Assertions.assertEquals(typeName, capturedValue.getType()); - Assertions.assertEquals(value, getValue(capturedValue)); + assertEquals(typeName, capturedValue.getType()); + assertEquals(value, getValue(capturedValue)); } private void assertCaptureLocals( CapturedContext context, String name, String typeName, String value) { CapturedContext.CapturedValue localVar = context.getLocals().get(name); - Assertions.assertEquals(typeName, localVar.getType()); - Assertions.assertEquals(value, getValue(localVar)); + assertEquals(typeName, localVar.getType()); + assertEquals(value, getValue(localVar)); } private void assertCaptureLocals( CapturedContext context, String name, String typeName, Map expectedFields) { CapturedContext.CapturedValue localVar = context.getLocals().get(name); - Assertions.assertEquals(typeName, localVar.getType()); + assertEquals(typeName, localVar.getType()); Map fields = getFields(localVar); for (Map.Entry entry : expectedFields.entrySet()) { assertTrue(fields.containsKey(entry.getKey())); CapturedContext.CapturedValue fieldCapturedValue = fields.get(entry.getKey()); if (fieldCapturedValue.getNotCapturedReason() != null) { - Assertions.assertEquals( - entry.getValue(), String.valueOf(fieldCapturedValue.getNotCapturedReason())); + assertEquals(entry.getValue(), String.valueOf(fieldCapturedValue.getNotCapturedReason())); } else { - Assertions.assertEquals(entry.getValue(), String.valueOf(fieldCapturedValue.getValue())); + assertEquals(entry.getValue(), String.valueOf(fieldCapturedValue.getValue())); } } } @@ -1653,18 +2002,18 @@ private void assertCaptureLocals( private void assertCaptureFields( CapturedContext context, String name, String typeName, String value) { CapturedContext.CapturedValue field = context.getFields().get(name); - Assertions.assertEquals(typeName, field.getType()); - Assertions.assertEquals(value, getValue(field)); + assertEquals(typeName, field.getType()); + assertEquals(value, getValue(field)); } private void assertCaptureFields( CapturedContext context, String name, String typeName, Collection collection) { CapturedContext.CapturedValue field = context.getFields().get(name); - Assertions.assertEquals(typeName, field.getType()); + assertEquals(typeName, field.getType()); Iterator iterator = collection.iterator(); for (Object obj : getCollection(field)) { if (iterator.hasNext()) { - Assertions.assertEquals(iterator.next(), obj); + assertEquals(iterator.next(), obj); } else { Assertions.fail("not same number of elements"); } @@ -1674,38 +2023,37 @@ private void assertCaptureFields( private void assertCaptureFields( CapturedContext context, String name, String typeName, Map expectedMap) { CapturedContext.CapturedValue field = context.getFields().get(name); - Assertions.assertEquals(typeName, field.getType()); + assertEquals(typeName, field.getType()); Map map = getMap(field); - Assertions.assertEquals(expectedMap.size(), map.size()); + assertEquals(expectedMap.size(), map.size()); for (Map.Entry entry : map.entrySet()) { assertTrue(expectedMap.containsKey(entry.getKey())); - Assertions.assertEquals(expectedMap.get(entry.getKey()), entry.getValue()); + assertEquals(expectedMap.get(entry.getKey()), entry.getValue()); } } private void assertCaptureFieldCount(CapturedContext context, int expectedFieldCount) { - Assertions.assertEquals(expectedFieldCount, context.getFields().size()); + assertEquals(expectedFieldCount, context.getFields().size()); } private void 
assertCaptureReturnValue(CapturedContext context, String typeName, String value) { CapturedContext.CapturedValue returnValue = context.getLocals().get("@return"); - Assertions.assertEquals(typeName, returnValue.getType()); - Assertions.assertEquals(value, getValue(returnValue)); + assertEquals(typeName, returnValue.getType()); + assertEquals(value, getValue(returnValue)); } private void assertCaptureReturnValue( CapturedContext context, String typeName, Map expectedFields) { CapturedContext.CapturedValue returnValue = context.getLocals().get("@return"); - Assertions.assertEquals(typeName, returnValue.getType()); + assertEquals(typeName, returnValue.getType()); Map fields = getFields(returnValue); for (Map.Entry entry : expectedFields.entrySet()) { assertTrue(fields.containsKey(entry.getKey())); CapturedContext.CapturedValue fieldCapturedValue = fields.get(entry.getKey()); if (fieldCapturedValue.getNotCapturedReason() != null) { - Assertions.assertEquals( - entry.getValue(), String.valueOf(fieldCapturedValue.getNotCapturedReason())); + assertEquals(entry.getValue(), String.valueOf(fieldCapturedValue.getNotCapturedReason())); } else { - Assertions.assertEquals(entry.getValue(), String.valueOf(fieldCapturedValue.getValue())); + assertEquals(entry.getValue(), String.valueOf(fieldCapturedValue.getValue())); } } } @@ -1722,13 +2070,13 @@ private void assertCaptureThrowable( String message, String methodName, int lineNumber) { - Assertions.assertNotNull(throwable); - Assertions.assertEquals(typeName, throwable.getType()); - Assertions.assertEquals(message, throwable.getMessage()); - Assertions.assertNotNull(throwable.getStacktrace()); + assertNotNull(throwable); + assertEquals(typeName, throwable.getType()); + assertEquals(message, throwable.getMessage()); + assertNotNull(throwable.getStacktrace()); Assertions.assertFalse(throwable.getStacktrace().isEmpty()); - Assertions.assertEquals(methodName, throwable.getStacktrace().get(0).getFunction()); - Assertions.assertEquals(lineNumber, throwable.getStacktrace().get(0).getLineNumber()); + assertEquals(methodName, throwable.getStacktrace().get(0).getFunction()); + assertEquals(lineNumber, throwable.getStacktrace().get(0).getLineNumber()); } private static String getValue(CapturedContext.CapturedValue capturedValue) { @@ -1841,6 +2189,36 @@ private static LogProbe createSourceFileProbe(ProbeId id, String sourceFile, int .build(); } + static class MockInstrumentationListener implements DebuggerTransformer.InstrumentationListener { + final Map results = new HashMap<>(); + + @Override + public void instrumentationResult(ProbeDefinition definition, InstrumentationResult result) { + results.put(definition.getId(), result); + } + } + + static class MockSampler implements Sampler { + + int callCount; + + @Override + public boolean sample() { + callCount++; + return true; + } + + @Override + public boolean keep() { + return false; + } + + @Override + public boolean drop() { + return false; + } + } + static class KotlinHelper { public static Class compileAndLoad( String className, String sourceFileName, List outputFilesToDelete) { diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationTest.java index 0a0a1815da9..ce1c446c54f 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationTest.java @@ 
-120,13 +120,15 @@ public void deserializeLogProbes() throws Exception { ArrayList logProbes = new ArrayList<>(config.getLogProbes()); assertEquals(1, logProbes.size()); LogProbe logProbe0 = logProbes.get(0); - assertEquals(6, logProbe0.getSegments().size()); + assertEquals(8, logProbe0.getSegments().size()); assertEquals("this is a log line customized! uuid=", logProbe0.getSegments().get(0).getStr()); assertEquals("uuid", logProbe0.getSegments().get(1).getExpr()); assertEquals(" result=", logProbe0.getSegments().get(2).getStr()); assertEquals("result", logProbe0.getSegments().get(3).getExpr()); assertEquals(" garbageStart=", logProbe0.getSegments().get(4).getStr()); assertEquals("garbageStart", logProbe0.getSegments().get(5).getExpr()); + assertEquals(" contain=", logProbe0.getSegments().get(6).getStr()); + assertEquals("contains(arg, 'foo')", logProbe0.getSegments().get(7).getExpr()); } @Test diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationUpdaterTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationUpdaterTest.java index dcadafd6e18..5261dfb892f 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationUpdaterTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ConfigurationUpdaterTest.java @@ -60,6 +60,7 @@ public class ConfigurationUpdaterTest { void setUp() { lenient().when(tracerConfig.getFinalDebuggerSnapshotUrl()).thenReturn("http://localhost"); lenient().when(tracerConfig.getDebuggerUploadBatchSize()).thenReturn(100); + lenient().when(tracerConfig.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost"); debuggerSinkWithMockStatusSink = new DebuggerSink(tracerConfig, probeStatusSink); } @@ -376,7 +377,7 @@ public void acceptDeleteProbeSameClass() throws UnmodifiableClassException { ConfigurationUpdater configurationUpdater = new ConfigurationUpdater( inst, - (tracerConfig, configuration, listener) -> { + (tracerConfig, configuration, listener, debuggerSink) -> { assertEquals(expectedDefinitions.get(), configuration.getDefinitions().size()); return transformer; }, @@ -698,7 +699,8 @@ private static Configuration createAppLogs(List logProbes) { private DebuggerTransformer createTransformer( Config tracerConfig, Configuration configuration, - DebuggerTransformer.InstrumentationListener listener) { + DebuggerTransformer.InstrumentationListener listener, + DebuggerSink debuggerSink) { return transformer; } } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/DebuggerTransformerTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/DebuggerTransformerTest.java index 5ec91f4e910..a10ba941959 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/DebuggerTransformerTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/DebuggerTransformerTest.java @@ -2,11 +2,18 @@ import static com.datadog.debugger.util.ClassFileHelperTest.getClassFileBytes; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.datadog.debugger.instrumentation.DiagnosticMessage; import com.datadog.debugger.instrumentation.InstrumentationResult; import com.datadog.debugger.probe.LogProbe; +import com.datadog.debugger.probe.ProbeDefinition; +import com.datadog.debugger.probe.SpanProbe; +import com.datadog.debugger.probe.Where; 
+import com.datadog.debugger.sink.DebuggerSink; +import com.datadog.debugger.sink.ProbeStatusSink; import com.datadog.debugger.sink.Sink; import com.datadog.debugger.sink.Snapshot; import datadog.trace.api.Config; @@ -29,7 +36,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import net.bytebuddy.agent.ByteBuddyAgent; @@ -38,6 +44,11 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.tree.ClassNode; +import org.objectweb.asm.tree.MethodNode; +import org.objectweb.asm.tree.VarInsnNode; public class DebuggerTransformerTest { private static final String LANGUAGE = "java"; @@ -59,7 +70,6 @@ static class TestSnapshotListener implements Sink { boolean skipped; DebuggerContext.SkipCause cause; List snapshots = new ArrayList<>(); - Map> errors = new HashMap<>(); @Override public void skipSnapshot(String probeId, DebuggerContext.SkipCause cause) { @@ -71,11 +81,6 @@ public void skipSnapshot(String probeId, DebuggerContext.SkipCause cause) { public void addSnapshot(Snapshot snapshot) { snapshots.add(snapshot); } - - @Override - public void addDiagnostics(ProbeId probeId, List messages) { - errors.computeIfAbsent(probeId.getId(), k -> new ArrayList<>()).addAll(messages); - } } static final String VAR_NAME = "var"; @@ -143,7 +148,7 @@ void setup() { @Test public void testDump() { - Config config = mock(Config.class); + Config config = createConfig(); when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); File instrumentedClassFile = new File("/tmp/debugger/java.util.ArrayList.class"); File origClassFile = new File("/tmp/debugger/java.util.ArrayList_orig.class"); @@ -157,7 +162,7 @@ public void testDump() { LogProbe.builder().where("java.util.ArrayList", "add").probeId("", 0).build(); DebuggerTransformer debuggerTransformer = new DebuggerTransformer( - config, new Configuration(SERVICE_NAME, Collections.singletonList(logProbe)), null); + config, new Configuration(SERVICE_NAME, Collections.singletonList(logProbe))); debuggerTransformer.transform( ClassLoader.getSystemClassLoader(), "java.util.ArrayList", @@ -188,7 +193,7 @@ public void testMultiProbesSimpleName() { private void doTestMultiProbes( Function, String> getClassName, ProbeTestInfo... 
probeInfos) { - Config config = mock(Config.class); + Config config = createConfig(); List logProbes = new ArrayList<>(); for (ProbeTestInfo probeInfo : probeInfos) { String className = getClassName.apply(probeInfo.clazz); @@ -239,7 +244,7 @@ public ProbeTestInfo(Class clazz, String methodName, String signature) { @Test public void testBlockedProbes() { - Config config = mock(Config.class); + Config config = createConfig(); List logProbes = Arrays.asList( LogProbe.builder() @@ -251,7 +256,10 @@ public void testBlockedProbes() { AtomicReference lastResult = new AtomicReference<>(null); DebuggerTransformer debuggerTransformer = new DebuggerTransformer( - config, configuration, ((definition, result) -> lastResult.set(result))); + config, + configuration, + ((definition, result) -> lastResult.set(result)), + new DebuggerSink(config)); byte[] newClassBuffer = debuggerTransformer.transform( ClassLoader.getSystemClassLoader(), @@ -268,14 +276,17 @@ public void testBlockedProbes() { @Test public void classBeingRedefinedNull() { - Config config = mock(Config.class); + Config config = createConfig(); LogProbe logProbe = LogProbe.builder().where("ArrayList", "add").probeId("", 0).build(); Configuration configuration = new Configuration(SERVICE_NAME, Collections.singletonList(logProbe)); AtomicReference lastResult = new AtomicReference<>(null); DebuggerTransformer debuggerTransformer = new DebuggerTransformer( - config, configuration, ((definition, result) -> lastResult.set(result))); + config, + configuration, + ((definition, result) -> lastResult.set(result)), + new DebuggerSink(config)); byte[] newClassBuffer = debuggerTransformer.transform( ClassLoader.getSystemClassLoader(), @@ -289,4 +300,90 @@ public void classBeingRedefinedNull() { Assertions.assertTrue(lastResult.get().isInstalled()); Assertions.assertEquals("java.util.ArrayList", lastResult.get().getTypeName()); } + + @Test + public void classGenerationFailed() { + Config config = createConfig(); + TestSnapshotListener listener = new TestSnapshotListener(); + DebuggerAgentHelper.injectSink(listener); + final String CLASS_NAME = DebuggerAgent.class.getTypeName(); + final String METHOD_NAME = "run"; + MockProbe mockProbe = MockProbe.builder(PROBE_ID).where(CLASS_NAME, METHOD_NAME).build(); + LogProbe logProbe1 = + LogProbe.builder().probeId("logprobe1", 0).where(CLASS_NAME, METHOD_NAME).build(); + LogProbe logProbe2 = + LogProbe.builder().probeId("logprobe2", 0).where(CLASS_NAME, METHOD_NAME).build(); + Configuration configuration = + Configuration.builder() + .setService(SERVICE_NAME) + .addSpanProbes(Collections.singletonList(mockProbe)) + .addLogProbes(Arrays.asList(logProbe1, logProbe2)) + .build(); + AtomicReference lastResult = new AtomicReference<>(null); + ProbeStatusSink probeStatusSink = mock(ProbeStatusSink.class); + DebuggerTransformer debuggerTransformer = + new DebuggerTransformer( + config, + configuration, + ((definition, result) -> lastResult.set(result)), + new DebuggerSink(config, probeStatusSink)); + byte[] newClassBuffer = + debuggerTransformer.transform( + ClassLoader.getSystemClassLoader(), + "com/datadog/debugger/agent/DebuggerAgent", + null, + null, + getClassFileBytes(DebuggerAgent.class)); + Assertions.assertNull(newClassBuffer); + ArgumentCaptor strCaptor = ArgumentCaptor.forClass(String.class); + ArgumentCaptor probeIdCaptor = ArgumentCaptor.forClass(ProbeId.class); + verify(probeStatusSink, times(3)).addError(probeIdCaptor.capture(), strCaptor.capture()); + Assertions.assertEquals("logprobe1", 
probeIdCaptor.getAllValues().get(0).getId()); + Assertions.assertEquals("logprobe2", probeIdCaptor.getAllValues().get(1).getId()); + Assertions.assertEquals(PROBE_ID.getId(), probeIdCaptor.getAllValues().get(2).getId()); + Assertions.assertEquals( + "Instrumentation fails for " + CLASS_NAME, strCaptor.getAllValues().get(0)); + Assertions.assertEquals( + "Instrumentation fails for " + CLASS_NAME, strCaptor.getAllValues().get(1)); + Assertions.assertEquals( + "Instrumentation fails for " + CLASS_NAME, strCaptor.getAllValues().get(2)); + } + + private Config createConfig() { + Config config = mock(Config.class); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + when(config.getDebuggerUploadBatchSize()).thenReturn(100); + return config; + } + + private static class MockProbe extends SpanProbe { + + public MockProbe(ProbeId probeId, Where where) { + super(LANGUAGE, probeId, null, where); + } + + @Override + public InstrumentationResult.Status instrument( + ClassLoader classLoader, + ClassNode classNode, + MethodNode methodNode, + List diagnostics, + List probeIds) { + methodNode.instructions.insert( + new VarInsnNode(Opcodes.ASTORE, methodNode.localVariables.size())); + return InstrumentationResult.Status.INSTALLED; + } + + public static MockProbe.Builder builder(ProbeId probeId) { + return new MockProbe.Builder().probeId(probeId); + } + + public static class Builder extends ProbeDefinition.Builder { + public MockProbe build() { + return new MockProbe(probeId, where); + } + } + } } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogMessageTemplateBuilderTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogMessageTemplateBuilderTest.java index ed26ece891f..0089376522b 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogMessageTemplateBuilderTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogMessageTemplateBuilderTest.java @@ -5,6 +5,9 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.datadog.debugger.el.DSL; +import com.datadog.debugger.el.ValueScript; +import com.datadog.debugger.el.values.StringValue; import com.datadog.debugger.probe.LogProbe; import datadog.trace.bootstrap.debugger.CapturedContext; import datadog.trace.bootstrap.debugger.EvaluationError; @@ -16,6 +19,7 @@ import java.util.List; import java.util.Map; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledIf; import org.junit.jupiter.api.condition.EnabledOnJre; import org.junit.jupiter.api.condition.JRE; @@ -66,6 +70,24 @@ public void argTemplate() { assertEquals("foo", message); } + @Test + public void booleanArgTemplate() { + List segments = new ArrayList<>(); + segments.add( + new LogProbe.Segment( + new ValueScript( + DSL.bool(DSL.contains(DSL.ref("arg"), new StringValue("o"))), "{arg}"))); + LogProbe probe = LogProbe.builder().template("{contains(arg, 'o')}", segments).build(); + LogMessageTemplateBuilder summaryBuilder = new LogMessageTemplateBuilder(probe.getSegments()); + CapturedContext capturedContext = new CapturedContext(); + capturedContext.addArguments( + new CapturedContext.CapturedValue[] { + CapturedContext.CapturedValue.of("arg", String.class.getTypeName(), "foo") + }); + String message = 
summaryBuilder.evaluate(capturedContext, new LogProbe.LogStatus(probe)); + assertEquals("true", message); + } + @Test public void argMultipleInFlightTemplate() { LogProbe probe = createLogProbe("{arg}"); @@ -203,6 +225,7 @@ public void argComplexObjectArrayTemplate() { @Test @EnabledOnJre(JRE.JAVA_17) + @DisabledIf("datadog.trace.api.Platform#isJ9") public void argInaccessibleFieldTemplate() { LogProbe probe = createLogProbe("{obj}"); LogMessageTemplateBuilder summaryBuilder = new LogMessageTemplateBuilder(probe.getSegments()); diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogProbesInstrumentationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogProbesInstrumentationTest.java index 057f8f028e6..47f80e0c94d 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogProbesInstrumentationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/LogProbesInstrumentationTest.java @@ -283,7 +283,7 @@ public void lineTemplateNullFieldLog() throws IOException, URISyntaxException { "this is log line with field={Cannot dereference to field: intValue}", snapshot.getMessage()); assertEquals(1, snapshot.getEvaluationErrors().size()); - assertEquals("intValue", snapshot.getEvaluationErrors().get(0).getExpr()); + assertEquals("nullObject.intValue", snapshot.getEvaluationErrors().get(0).getExpr()); assertEquals( "Cannot dereference to field: intValue", snapshot.getEvaluationErrors().get(0).getMessage()); @@ -486,8 +486,11 @@ private DebuggerTransformerTest.TestSnapshotListener installProbes( Config config = mock(Config.class); when(config.isDebuggerEnabled()).thenReturn(true); when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); - when(config.isDebuggerVerifyByteCode()).thenReturn(true); - currentTransformer = new DebuggerTransformer(config, configuration, null); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + when(config.getDebuggerUploadBatchSize()).thenReturn(100); + currentTransformer = new DebuggerTransformer(config, configuration); instr.addTransformer(currentTransformer); DebuggerTransformerTest.TestSnapshotListener listener = new DebuggerTransformerTest.TestSnapshotListener(); diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/MetricProbesInstrumentationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/MetricProbesInstrumentationTest.java index 15faf2ee2f0..d898d7d904a 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/MetricProbesInstrumentationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/MetricProbesInstrumentationTest.java @@ -4,7 +4,12 @@ import static com.datadog.debugger.probe.MetricProbe.MetricKind.DISTRIBUTION; import static com.datadog.debugger.probe.MetricProbe.MetricKind.GAUGE; import static com.datadog.debugger.probe.MetricProbe.MetricKind.HISTOGRAM; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static utils.InstrumentationTestHelper.compileAndLoadClass; @@ -12,8 +17,8 @@ import 
com.datadog.debugger.el.ValueScript; import com.datadog.debugger.instrumentation.DiagnosticMessage; import com.datadog.debugger.probe.MetricProbe; -import com.datadog.debugger.sink.Sink; -import com.datadog.debugger.sink.Snapshot; +import com.datadog.debugger.sink.DebuggerSink; +import com.datadog.debugger.sink.ProbeStatusSink; import datadog.trace.api.Config; import datadog.trace.bootstrap.debugger.DebuggerContext; import datadog.trace.bootstrap.debugger.MethodLocation; @@ -32,6 +37,7 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; public class MetricProbesInstrumentationTest { private static final String LANGUAGE = "java"; @@ -43,9 +49,10 @@ public class MetricProbesInstrumentationTest { private static final String METRIC_PROBEID_TAG = "debugger.probeid:beae1807-f3b0-4ea8-a74f-826790c5e6f8"; + private final List currentDiagnostics = new ArrayList<>(); private Instrumentation instr = ByteBuddyAgent.install(); private ClassFileTransformer currentTransformer; - private MockSink mockSink; + private ProbeStatusSink probeStatusSink; @AfterEach public void after() { @@ -125,9 +132,8 @@ public void invalidConstantValueMetric() throws IOException, URISyntaxException int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(3, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Unsupported constant value: 42.0 type: java.lang.Double", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink) + .addError(eq(METRIC_ID), eq("Unsupported constant value: 42.0 type: java.lang.Double")); } @Test @@ -146,8 +152,7 @@ public void invalidValueMetric() throws IOException, URISyntaxException { int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(3, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Cannot resolve symbol value", mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink).addError(eq(METRIC_ID), eq("Cannot resolve symbol value")); } @Test @@ -175,11 +180,10 @@ public void multiInvalidValueMetric() throws IOException, URISyntaxException { int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(3, result); Assertions.assertEquals(0, listener.counters.size()); - Assertions.assertEquals(2, mockSink.getCurrentDiagnostics().size()); - Assertions.assertEquals( - "Cannot resolve symbol value", mockSink.getCurrentDiagnostics().get(0).getMessage()); - Assertions.assertEquals( - "Cannot resolve symbol invalid", mockSink.getCurrentDiagnostics().get(1).getMessage()); + ArgumentCaptor msgCaptor = ArgumentCaptor.forClass(String.class); + verify(probeStatusSink, times(2)).addError(any(), msgCaptor.capture()); + Assertions.assertEquals("Cannot resolve symbol value", msgCaptor.getAllValues().get(0)); + Assertions.assertEquals("Cannot resolve symbol invalid", msgCaptor.getAllValues().get(1)); } @Test @@ -398,8 +402,7 @@ public void invalidNameArgumentRefValueCountMetric() throws IOException, URISynt int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(48, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Cannot resolve symbol foo", mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink).addError(eq(METRIC_ID), eq("Cannot resolve symbol foo")); } @Test @@ -418,9 
+421,10 @@ public void invalidTypeArgumentRefValueCountMetric() throws IOException, URISynt int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(48, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Incompatible type for expression: java.lang.String with expected types: [long]", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink) + .addError( + eq(METRIC_ID), + eq("Incompatible type for expression: java.lang.String with expected types: [long]")); } @Test @@ -450,8 +454,7 @@ public void invalidNameLocalRefValueCountMetric() throws IOException, URISyntaxE int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(3, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Cannot resolve symbol foo", mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink).addError(eq(METRIC_ID), eq("Cannot resolve symbol foo")); } @Test @@ -466,9 +469,10 @@ public void invalidTypeLocalRefValueCountMetric() throws IOException, URISyntaxE int result = Reflect.on(testClass).call("main", "1").get(); Assertions.assertEquals(3, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Incompatible type for expression: java.lang.String with expected types: [long]", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink) + .addError( + eq(METRIC_ID), + eq("Incompatible type for expression: java.lang.String with expected types: [long]")); } @Test @@ -594,7 +598,7 @@ public void nullMultiFieldRefValueCountMetric() throws IOException, URISyntaxExc Assertions.assertEquals(143, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME1)); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME2)); - Assertions.assertTrue(mockSink.getCurrentDiagnostics().isEmpty()); + verify(probeStatusSink, times(0)).addError(any(), anyString()); } @Test @@ -626,10 +630,10 @@ public void invalidNameMultiFieldRefValueCountMetric() throws IOException, URISy Assertions.assertEquals(143, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME1)); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME2)); - Assertions.assertEquals( - "Cannot resolve field foovalue", mockSink.getCurrentDiagnostics().get(0).getMessage()); - Assertions.assertEquals( - "Cannot resolve field foovalue", mockSink.getCurrentDiagnostics().get(1).getMessage()); + ArgumentCaptor strCaptor = ArgumentCaptor.forClass(String.class); + verify(probeStatusSink, times(2)).addError(any(), strCaptor.capture()); + Assertions.assertEquals("Cannot resolve field foovalue", strCaptor.getAllValues().get(0)); + Assertions.assertEquals("Cannot resolve field foovalue", strCaptor.getAllValues().get(1)); } @Test @@ -661,12 +665,14 @@ public void invalidTypeMultiFieldRefValueCountMetric() throws IOException, URISy Assertions.assertEquals(143, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME1)); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME2)); + ArgumentCaptor strCaptor = ArgumentCaptor.forClass(String.class); + verify(probeStatusSink, times(2)).addError(any(), strCaptor.capture()); Assertions.assertEquals( "Incompatible type for expression: java.lang.String with expected types: [long]", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + strCaptor.getAllValues().get(0)); Assertions.assertEquals( 
"Incompatible type for expression: java.lang.String with expected types: [long]", - mockSink.getCurrentDiagnostics().get(1).getMessage()); + strCaptor.getAllValues().get(1)); } @Test @@ -686,8 +692,7 @@ public void invalidNameFieldRefValueCountMetric() throws IOException, URISyntaxE int result = Reflect.on(testClass).call("main", "f").get(); Assertions.assertEquals(42, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Cannot resolve symbol fooValue", mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink).addError(eq(METRIC_ID), eq("Cannot resolve symbol fooValue")); } @Test @@ -706,9 +711,10 @@ public void invalidTypeFieldRefValueCountMetric() throws IOException, URISyntaxE int result = Reflect.on(testClass).call("main", "f").get(); Assertions.assertEquals(42, result); Assertions.assertFalse(listener.counters.containsKey(METRIC_NAME)); - Assertions.assertEquals( - "Incompatible type for expression: java.lang.String with expected types: [long]", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink) + .addError( + eq(METRIC_ID), + eq("Incompatible type for expression: java.lang.String with expected types: [long]")); } @Test @@ -867,16 +873,16 @@ public void nullExpression() throws IOException, URISyntaxException { int result = Reflect.on(testClass).call("main", "f").get(); Assertions.assertEquals(42, result); Assertions.assertEquals(0, listener.gauges.size()); - Assertions.assertEquals(3, mockSink.getCurrentDiagnostics().size()); + ArgumentCaptor strCaptor = ArgumentCaptor.forClass(String.class); + verify(probeStatusSink, times(3)).addError(any(), strCaptor.capture()); Assertions.assertEquals( - "Unsupported type for len operation: java.lang.Object", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + "Unsupported type for len operation: java.lang.Object", strCaptor.getAllValues().get(0)); Assertions.assertEquals( "Incompatible type for expression: java.lang.String with expected types: [long,double]", - mockSink.getCurrentDiagnostics().get(1).getMessage()); + strCaptor.getAllValues().get(1)); Assertions.assertEquals( "Incompatible type for expression: java.lang.Object with expected types: [long,double]", - mockSink.getCurrentDiagnostics().get(2).getMessage()); + strCaptor.getAllValues().get(2)); } @Test @@ -949,9 +955,11 @@ public void indexInvalidKeyTypeExpression() throws IOException, URISyntaxExcepti Assertions.assertEquals(42, result); Assertions.assertFalse(listener.gauges.containsKey(ARRAYSTRIDX_METRIC)); Assertions.assertFalse(listener.gauges.containsKey(ARRAYOOBIDX_METRIC)); - Assertions.assertEquals( - "Incompatible type for key: Type{mainType=Ljava/lang/String;, genericTypes=[]}, expected int or long", - mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink, times(2)) + .addError( + eq(METRIC_ID), + eq( + "Incompatible type for key: Type{mainType=Ljava/lang/String;, genericTypes=[]}, expected int or long")); } @Test @@ -1169,11 +1177,15 @@ private MetricForwarderListener installMetricProbes(Configuration configuration) Config config = mock(Config.class); when(config.isDebuggerEnabled()).thenReturn(true); when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); - currentTransformer = new DebuggerTransformer(config, configuration); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + 
probeStatusSink = mock(ProbeStatusSink.class); + currentTransformer = + new DebuggerTransformer( + config, configuration, null, new DebuggerSink(config, probeStatusSink)); instr.addTransformer(currentTransformer); MetricForwarderListener listener = new MetricForwarderListener(); - mockSink = new MockSink(); - DebuggerAgentHelper.injectSink(mockSink); DebuggerContext.init(null, listener); DebuggerContext.initClassFilter(new DenyListHelper(null)); return listener; @@ -1265,27 +1277,4 @@ public void setThrowing(boolean value) { this.throwing = value; } } - - private static class MockSink implements Sink { - - private final List<DiagnosticMessage> currentDiagnostics = new ArrayList<>(); - - @Override - public void addSnapshot(Snapshot snapshot) {} - - @Override - public void skipSnapshot(String probeId, DebuggerContext.SkipCause cause) {} - - @Override - public void addDiagnostics(ProbeId probeId, List<DiagnosticMessage> messages) { - for (DiagnosticMessage msg : messages) { - System.out.println(msg); - } - currentDiagnostics.addAll(messages); - } - - public List<DiagnosticMessage> getCurrentDiagnostics() { - return currentDiagnostics; - } - } } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ProbeInstrumentationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ProbeInstrumentationTest.java index 628192c3501..fa6ff412955 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ProbeInstrumentationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/ProbeInstrumentationTest.java @@ -1,10 +1,9 @@ package com.datadog.debugger.agent; -import com.datadog.debugger.instrumentation.DiagnosticMessage; +import com.datadog.debugger.sink.ProbeStatusSink; import com.datadog.debugger.sink.Sink; import com.datadog.debugger.sink.Snapshot; import datadog.trace.bootstrap.debugger.DebuggerContext; -import datadog.trace.bootstrap.debugger.ProbeId; import java.lang.instrument.ClassFileTransformer; import java.lang.instrument.Instrumentation; import java.util.ArrayList; @@ -18,6 +17,7 @@ public class ProbeInstrumentationTest { protected Instrumentation instr = ByteBuddyAgent.install(); protected ClassFileTransformer currentTransformer; protected MockSink mockSink; + protected ProbeStatusSink probeStatusSink; @AfterEach public void after() { @@ -28,7 +28,6 @@ public void after() { protected static class MockSink implements Sink { - private final List<DiagnosticMessage> currentDiagnostics = new ArrayList<>(); private final List<Snapshot> snapshots = new ArrayList<>(); @Override @@ -39,18 +38,6 @@ public void addSnapshot(Snapshot snapshot) { @Override public void skipSnapshot(String probeId, DebuggerContext.SkipCause cause) {} - @Override - public void addDiagnostics(ProbeId probeId, List<DiagnosticMessage> messages) { - for (DiagnosticMessage msg : messages) { - System.out.println(msg); - } - currentDiagnostics.addAll(messages); - } - - public List<DiagnosticMessage> getCurrentDiagnostics() { - return currentDiagnostics; - } - public List<Snapshot> getSnapshots() { return snapshots; } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SnapshotSerializationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SnapshotSerializationTest.java index 7492d3c75a3..ab1438171b7 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SnapshotSerializationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SnapshotSerializationTest.java @@ -54,13 +54,13 @@ import java.util.List; import java.util.Map; import 
java.util.Random; -import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.LockSupport; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledIf; import org.junit.jupiter.api.condition.EnabledOnJre; import org.junit.jupiter.api.condition.JRE; @@ -96,6 +96,7 @@ public void roundTripProbeDetails() throws IOException { @Test @EnabledOnJre(JRE.JAVA_17) + @DisabledIf("datadog.trace.api.Platform#isJ9") public void roundTripCapturedValue() throws IOException, URISyntaxException { JsonAdapter adapter = createSnapshotAdapter(); Snapshot snapshot = createSnapshot(); @@ -700,20 +701,13 @@ public void capturedValueAdapterNull() { } @Test - public void collectionValueThrows() throws IOException { + public void collectionUnknown() throws IOException { JsonAdapter adapter = createSnapshotAdapter(); Snapshot snapshot = createSnapshot(); CapturedContext context = new CapturedContext(); CapturedContext.CapturedValue listLocal = CapturedContext.CapturedValue.of( - "listLocal", - List.class.getTypeName(), - new ArrayList() { - @Override - public int size() { - throw new UnsupportedOperationException(); - } - }); + "listLocal", List.class.getTypeName(), new ArrayList() {}); context.addLocals(new CapturedContext.CapturedValue[] {listLocal}); snapshot.setExit(context); String buffer = adapter.toJson(snapshot); @@ -721,24 +715,18 @@ public int size() { Map locals = getLocalsFromJson(buffer); Map mapFieldObj = (Map) locals.get("listLocal"); Assertions.assertEquals( - "java.lang.UnsupportedOperationException", mapFieldObj.get(NOT_CAPTURED_REASON)); + "java.lang.RuntimeException: Unsupported Collection type: com.datadog.debugger.agent.SnapshotSerializationTest$1", + mapFieldObj.get(NOT_CAPTURED_REASON)); } @Test - public void mapValueThrows() throws IOException { + public void mapValueUnknown() throws IOException { JsonAdapter adapter = createSnapshotAdapter(); Snapshot snapshot = createSnapshot(); CapturedContext context = new CapturedContext(); CapturedContext.CapturedValue mapLocal = CapturedContext.CapturedValue.of( - "mapLocal", - Map.class.getTypeName(), - new HashMap() { - @Override - public Set> entrySet() { - throw new UnsupportedOperationException(); - } - }); + "mapLocal", Map.class.getTypeName(), new HashMap() {}); context.addLocals(new CapturedContext.CapturedValue[] {mapLocal}); snapshot.setExit(context); String buffer = adapter.toJson(snapshot); @@ -746,7 +734,8 @@ public Set> entrySet() { Map locals = getLocalsFromJson(buffer); Map mapFieldObj = (Map) locals.get("mapLocal"); Assertions.assertEquals( - "java.lang.UnsupportedOperationException", mapFieldObj.get(NOT_CAPTURED_REASON)); + "java.lang.RuntimeException: Unsupported Map type: com.datadog.debugger.agent.SnapshotSerializationTest$2", + mapFieldObj.get(NOT_CAPTURED_REASON)); } @Test diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanDecorationProbeInstrumentationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanDecorationProbeInstrumentationTest.java index 1e09cc7a4f9..b19401ac7a2 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanDecorationProbeInstrumentationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanDecorationProbeInstrumentationTest.java @@ -6,8 +6,10 @@ import static 
com.datadog.debugger.probe.SpanDecorationProbe.TargetSpan.ACTIVE; import static com.datadog.debugger.probe.SpanDecorationProbe.TargetSpan.ROOT; import static com.datadog.debugger.util.LogProbeTestHelper.parseTemplate; +import static java.util.Collections.singletonList; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -16,16 +18,22 @@ import com.datadog.debugger.el.DSL; import com.datadog.debugger.el.ProbeCondition; import com.datadog.debugger.el.expressions.BooleanExpression; +import com.datadog.debugger.el.values.StringValue; +import com.datadog.debugger.probe.LogProbe; import com.datadog.debugger.probe.SpanDecorationProbe; +import com.datadog.debugger.sink.DebuggerSink; +import com.datadog.debugger.sink.ProbeStatusSink; import com.datadog.debugger.sink.Snapshot; import datadog.trace.agent.tooling.TracerInstaller; import datadog.trace.api.Config; import datadog.trace.api.interceptor.MutableSpan; import datadog.trace.api.interceptor.TraceInterceptor; +import datadog.trace.bootstrap.debugger.CapturedContext; import datadog.trace.bootstrap.debugger.DebuggerContext; import datadog.trace.bootstrap.debugger.MethodLocation; import datadog.trace.bootstrap.debugger.ProbeId; import datadog.trace.bootstrap.debugger.ProbeImplementation; +import datadog.trace.bootstrap.debugger.util.Redaction; import datadog.trace.core.CoreTracer; import java.io.IOException; import java.net.URISyntaxException; @@ -34,6 +42,7 @@ import java.util.Collection; import java.util.List; import org.joor.Reflect; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -41,6 +50,8 @@ public class SpanDecorationProbeInstrumentationTest extends ProbeInstrumentationTest { private static final String LANGUAGE = "java"; private static final ProbeId PROBE_ID = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f8", 0); + private static final ProbeId PROBE_ID1 = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f6", 0); + private static final ProbeId PROBE_ID2 = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f7", 0); private TestTraceInterceptor traceInterceptor = new TestTraceInterceptor(); @@ -51,6 +62,13 @@ public void setUp() { tracer.addTraceInterceptor(traceInterceptor); } + @Override + @AfterEach + public void after() { + super.after(); + Redaction.clearUserDefinedTypes(); + } + @Test public void methodActiveSpanSimpleTag() throws IOException, URISyntaxException { final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot20"; @@ -59,7 +77,7 @@ public void methodActiveSpanSimpleTag() throws IOException, URISyntaxException { CLASS_NAME, ACTIVE, decoration, "process", "int (java.lang.String)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - assertEquals(42, result); + assertEquals(84, result); MutableSpan span = traceInterceptor.getFirstSpan(); assertEquals("1", span.getTags().get("tag1")); assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag1.probe_id")); @@ -81,7 +99,7 @@ public void methodActiveSpanTagList() throws IOException, URISyntaxException { "int (java.lang.String)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", 
"1").get(); - assertEquals(42, result); + assertEquals(84, result); MutableSpan span = traceInterceptor.getFirstSpan(); assertEquals("1", span.getTags().get("tag1")); assertEquals("42", span.getTags().get("tag2")); @@ -124,7 +142,7 @@ public void methodActiveSpanCondition() throws IOException, URISyntaxException { Class testClass = compileAndLoadClass(CLASS_NAME); for (int i = 0; i < 10; i++) { int result = Reflect.on(testClass).call("main", String.valueOf(i)).get(); - assertEquals(42, result); + assertEquals(84, result); } assertEquals(10, traceInterceptor.getAllTraces().size()); MutableSpan span = traceInterceptor.getAllTraces().get(5).get(0); @@ -139,7 +157,7 @@ public void methodTagEvalError() throws IOException, URISyntaxException { CLASS_NAME, ACTIVE, decoration, "process", "int (java.lang.String)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - assertEquals(42, result); + assertEquals(84, result); MutableSpan span = traceInterceptor.getFirstSpan(); assertNull(span.getTags().get("tag1")); assertEquals("Cannot find symbol: noarg", span.getTags().get("_dd.di.tag1.evaluation_error")); @@ -154,7 +172,7 @@ public void methodActiveSpanInvalidCondition() throws IOException, URISyntaxExce CLASS_NAME, ACTIVE, decoration, "process", "int (java.lang.String)"); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "5").get(); - assertEquals(42, result); + assertEquals(84, result); assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag1")); assertEquals(1, mockSink.getSnapshots().size()); Snapshot snapshot = mockSink.getSnapshots().get(0); @@ -166,10 +184,10 @@ public void methodActiveSpanInvalidCondition() throws IOException, URISyntaxExce public void lineActiveSpanSimpleTag() throws IOException, URISyntaxException { final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot20"; SpanDecorationProbe.Decoration decoration = createDecoration("tag1", "{arg}"); - installSingleSpanDecoration(CLASS_NAME, ACTIVE, decoration, "CapturedSnapshot20.java", 37); + installSingleSpanDecoration(CLASS_NAME, ACTIVE, decoration, "CapturedSnapshot20.java", 38); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "1").get(); - assertEquals(42, result); + assertEquals(84, result); MutableSpan span = traceInterceptor.getFirstSpan(); assertEquals("1", span.getTags().get("tag1")); assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag1.probe_id")); @@ -198,11 +216,11 @@ public void lineActiveSpanCondition() throws IOException, URISyntaxException { final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot20"; SpanDecorationProbe.Decoration decoration = createDecoration(eq(ref("arg"), value("5")), "arg == '5'", "tag1", "{arg}"); - installSingleSpanDecoration(CLASS_NAME, ACTIVE, decoration, "CapturedSnapshot20.java", 37); + installSingleSpanDecoration(CLASS_NAME, ACTIVE, decoration, "CapturedSnapshot20.java", 38); Class testClass = compileAndLoadClass(CLASS_NAME); for (int i = 0; i < 10; i++) { int result = Reflect.on(testClass).call("main", String.valueOf(i)).get(); - assertEquals(42, result); + assertEquals(84, result); } assertEquals(10, traceInterceptor.getAllTraces().size()); MutableSpan span = traceInterceptor.getAllTraces().get(5).get(0); @@ -214,10 +232,10 @@ public void lineActiveSpanInvalidCondition() throws IOException, URISyntaxExcept final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot20"; 
SpanDecorationProbe.Decoration decoration = createDecoration(eq(ref("noarg"), value("5")), "arg == '5'", "tag1", "{arg}"); - installSingleSpanDecoration(CLASS_NAME, ACTIVE, decoration, "CapturedSnapshot20.java", 37); + installSingleSpanDecoration(CLASS_NAME, ACTIVE, decoration, "CapturedSnapshot20.java", 38); Class testClass = compileAndLoadClass(CLASS_NAME); int result = Reflect.on(testClass).call("main", "5").get(); - assertEquals(42, result); + assertEquals(84, result); assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag1")); assertEquals(1, mockSink.getSnapshots().size()); Snapshot snapshot = mockSink.getSnapshots().get(0); @@ -236,6 +254,207 @@ public void nullActiveSpan() throws IOException, URISyntaxException { assertEquals(0, traceInterceptor.getAllTraces().size()); } + @Test + public void mixedWithLogProbes() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot20"; + SpanDecorationProbe.Decoration decoration = createDecoration("tag1", "{intLocal}"); + SpanDecorationProbe spanDecoProbe = + createProbeBuilder( + PROBE_ID, ACTIVE, singletonList(decoration), CLASS_NAME, "process", null, null) + .evaluateAt(MethodLocation.EXIT) + .build(); + LogProbe logProbe1 = + LogProbe.builder() + .probeId(PROBE_ID1) + .where(CLASS_NAME, "process") + .captureSnapshot(true) + .build(); + LogProbe logProbe2 = + LogProbe.builder() + .probeId(PROBE_ID2) + .where(CLASS_NAME, "process") + .captureSnapshot(true) + .build(); + Configuration configuration = + Configuration.builder() + .setService(SERVICE_NAME) + .add(logProbe1) + .add(logProbe2) + .add(spanDecoProbe) + .build(); + installSpanDecorationProbes(CLASS_NAME, configuration); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "1").get(); + assertEquals(84, result); + MutableSpan span = traceInterceptor.getFirstSpan(); + assertEquals("84", span.getTags().get("tag1")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag1.probe_id")); + List snapshots = mockSink.getSnapshots(); + assertEquals(2, snapshots.size()); + CapturedContext.CapturedValue intLocal = + snapshots.get(0).getCaptures().getReturn().getLocals().get("intLocal"); + assertNotNull(intLocal); + } + + @Test + public void keywordRedaction() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot28"; + SpanDecorationProbe.Decoration decoration1 = createDecoration("tag1", "{password}"); + SpanDecorationProbe.Decoration decoration2 = createDecoration("tag2", "{this.password}"); + SpanDecorationProbe.Decoration decoration3 = createDecoration("tag3", "{strMap['password']}"); + List decorations = + Arrays.asList(decoration1, decoration2, decoration3); + installSingleSpanDecoration( + CLASS_NAME, ACTIVE, decorations, "process", "int (java.lang.String)"); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + assertEquals(42, result); + MutableSpan span = traceInterceptor.getFirstSpan(); + assertFalse(span.getTags().containsKey("tag1")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag1.probe_id")); + assertEquals( + "Could not evaluate the expression because 'password' was redacted", + span.getTags().get("_dd.di.tag1.evaluation_error")); + assertFalse(span.getTags().containsKey("tag2")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag2.probe_id")); + assertEquals( + "Could not evaluate the expression because 
'this.password' was redacted", + span.getTags().get("_dd.di.tag2.evaluation_error")); + assertFalse(span.getTags().containsKey("tag3")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag3.probe_id")); + assertEquals( + "Could not evaluate the expression because 'strMap[\"password\"]' was redacted", + span.getTags().get("_dd.di.tag3.evaluation_error")); + } + + @Test + public void keywordRedactionConditions() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot28"; + SpanDecorationProbe.Decoration decoration1 = + createDecoration( + DSL.contains(DSL.getMember(DSL.ref("this"), "password"), new StringValue("123")), + "contains(this.password, '123')", + "tag1", + "foo"); + SpanDecorationProbe.Decoration decoration2 = + createDecoration( + DSL.eq(DSL.ref("password"), DSL.value("123")), "password == '123'", "tag2", "foo"); + SpanDecorationProbe.Decoration decoration3 = + createDecoration( + DSL.eq(DSL.index(DSL.ref("strMap"), DSL.value("password")), DSL.value("123")), + "strMap['password'] == '123'", + "tag3", + "foo"); + List decorations = + Arrays.asList(decoration1, decoration2, decoration3); + installSingleSpanDecoration( + CLASS_NAME, ACTIVE, decorations, "process", "int (java.lang.String)"); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + assertEquals(42, result); + assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag1")); + assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag2")); + assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag3")); + assertEquals(1, mockSink.getSnapshots().size()); + Snapshot snapshot = mockSink.getSnapshots().get(0); + assertEquals(3, snapshot.getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'this.password' was redacted", + snapshot.getEvaluationErrors().get(0).getMessage()); + assertEquals( + "Could not evaluate the expression because 'password' was redacted", + snapshot.getEvaluationErrors().get(1).getMessage()); + assertEquals( + "Could not evaluate the expression because 'strMap[\"password\"]' was redacted", + snapshot.getEvaluationErrors().get(2).getMessage()); + } + + @Test + public void typeRedaction() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot28"; + Config config = mock(Config.class); + when(config.getDebuggerRedactedTypes()) + .thenReturn("com.datadog.debugger.CapturedSnapshot28$Creds"); + Redaction.addUserDefinedTypes(config); + SpanDecorationProbe.Decoration decoration1 = createDecoration("tag1", "{creds}"); + SpanDecorationProbe.Decoration decoration2 = createDecoration("tag2", "{this.creds}"); + SpanDecorationProbe.Decoration decoration3 = createDecoration("tag3", "{credMap['dave']}"); + List decorations = + Arrays.asList(decoration1, decoration2, decoration3); + installSingleSpanDecoration( + CLASS_NAME, ACTIVE, decorations, "process", "int (java.lang.String)"); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + assertEquals(42, result); + MutableSpan span = traceInterceptor.getFirstSpan(); + assertFalse(span.getTags().containsKey("tag1")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag1.probe_id")); + assertEquals( + "Could not evaluate the expression because 'creds' was redacted", + span.getTags().get("_dd.di.tag1.evaluation_error")); + 
assertFalse(span.getTags().containsKey("tag2")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag2.probe_id")); + assertEquals( + "Could not evaluate the expression because 'this.creds' was redacted", + span.getTags().get("_dd.di.tag2.evaluation_error")); + assertFalse(span.getTags().containsKey("tag3")); + assertEquals(PROBE_ID.getId(), span.getTags().get("_dd.di.tag3.probe_id")); + assertEquals( + "Could not evaluate the expression because 'credMap[\"dave\"]' was redacted", + span.getTags().get("_dd.di.tag3.evaluation_error")); + } + + @Test + public void typeRedactionConditions() throws IOException, URISyntaxException { + final String CLASS_NAME = "com.datadog.debugger.CapturedSnapshot28"; + Config config = mock(Config.class); + when(config.getDebuggerRedactedTypes()) + .thenReturn("com.datadog.debugger.CapturedSnapshot28$Creds"); + Redaction.addUserDefinedTypes(config); + SpanDecorationProbe.Decoration decoration1 = + createDecoration( + DSL.contains( + DSL.getMember(DSL.getMember(DSL.ref("this"), "creds"), "secretCode"), + new StringValue("123")), + "contains(this.creds.secretCode, '123')", + "tag1", + "foo"); + SpanDecorationProbe.Decoration decoration2 = + createDecoration( + DSL.eq(DSL.getMember(DSL.ref("creds"), "secretCode"), DSL.value("123")), + "creds.secretCode == '123'", + "tag2", + "foo"); + SpanDecorationProbe.Decoration decoration3 = + createDecoration( + DSL.eq(DSL.index(DSL.ref("credMap"), DSL.value("dave")), DSL.value("123")), + "credMap['dave'] == '123'", + "tag3", + "foo"); + List decorations = + Arrays.asList(decoration1, decoration2, decoration3); + installSingleSpanDecoration( + CLASS_NAME, ACTIVE, decorations, "process", "int (java.lang.String)"); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "secret123").get(); + assertEquals(42, result); + assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag1")); + assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag2")); + assertFalse(traceInterceptor.getFirstSpan().getTags().containsKey("tag3")); + assertEquals(1, mockSink.getSnapshots().size()); + Snapshot snapshot = mockSink.getSnapshots().get(0); + assertEquals(3, snapshot.getEvaluationErrors().size()); + assertEquals( + "Could not evaluate the expression because 'this.creds' was redacted", + snapshot.getEvaluationErrors().get(0).getMessage()); + assertEquals( + "Could not evaluate the expression because 'creds' was redacted", + snapshot.getEvaluationErrors().get(1).getMessage()); + assertEquals( + "Could not evaluate the expression because 'credMap[\"dave\"]' was redacted", + snapshot.getEvaluationErrors().get(2).getMessage()); + } + private SpanDecorationProbe.Decoration createDecoration(String tagName, String valueDsl) { List tags = Arrays.asList( @@ -305,6 +524,7 @@ private static SpanDecorationProbe createProbe( String... 
lines) { return createProbeBuilder( id, targetSpan, decorationList, typeName, methodName, signature, lines) + .evaluateAt(MethodLocation.EXIT) .build(); } @@ -362,24 +582,30 @@ private void installSpanDecorationProbes(String expectedClassName, Configuration Config config = mock(Config.class); when(config.isDebuggerEnabled()).thenReturn(true); when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); - currentTransformer = new DebuggerTransformer(config, configuration); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + probeStatusSink = mock(ProbeStatusSink.class); + currentTransformer = + new DebuggerTransformer( + config, configuration, null, new DebuggerSink(config, probeStatusSink)); instr.addTransformer(currentTransformer); mockSink = new MockSink(); DebuggerAgentHelper.injectSink(mockSink); DebuggerContext.init( - (id, callingClass) -> - resolver(id, callingClass, expectedClassName, configuration.getSpanDecorationProbes()), - null); + (id, callingClass) -> resolver(id, callingClass, expectedClassName, configuration), null); DebuggerContext.initClassFilter(new DenyListHelper(null)); } private ProbeImplementation resolver( - String id, - Class callingClass, - String expectedClassName, - Collection spanDecorationProbes) { + String id, Class callingClass, String expectedClassName, Configuration configuration) { Assertions.assertEquals(expectedClassName, callingClass.getName()); - for (SpanDecorationProbe probe : spanDecorationProbes) { + for (SpanDecorationProbe probe : configuration.getSpanDecorationProbes()) { + if (probe.getId().equals(id)) { + return probe; + } + } + for (LogProbe probe : configuration.getLogProbes()) { if (probe.getId().equals(id)) { return probe; } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanProbeInstrumentationTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanProbeInstrumentationTest.java index a92d0a72064..0764e1a5734 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanProbeInstrumentationTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/agent/SpanProbeInstrumentationTest.java @@ -3,14 +3,19 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static utils.InstrumentationTestHelper.compileAndLoadClass; import com.datadog.debugger.probe.SpanProbe; +import com.datadog.debugger.sink.DebuggerSink; +import com.datadog.debugger.sink.ProbeStatusSink; import datadog.trace.api.Config; import datadog.trace.bootstrap.debugger.DebuggerContext; import datadog.trace.bootstrap.debugger.DebuggerSpan; +import datadog.trace.bootstrap.debugger.ProbeId; import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; @@ -20,7 +25,7 @@ import org.junit.jupiter.api.Test; public class SpanProbeInstrumentationTest extends ProbeInstrumentationTest { - private static final String SPAN_ID = "beae1807-f3b0-4ea8-a74f-826790c5e6f8"; + private static final ProbeId SPAN_ID = new ProbeId("beae1807-f3b0-4ea8-a74f-826790c5e6f8", 0); private static final String 
SPAN_PROBEID_TAG = "debugger.probeid:beae1807-f3b0-4ea8-a74f-826790c5e6f8"; @@ -80,6 +85,17 @@ public void lineRangeSimpleSpan() throws IOException, URISyntaxException { assertArrayEquals(new String[] {SPAN_PROBEID_TAG}, span.tags); } + @Test + public void lineRangeErrorSimpleSpan() throws IOException, URISyntaxException { + final String CLASS_NAME = "CapturedSnapshot01"; + MockTracer tracer = installSingleSpan(CLASS_NAME + ".java", 5, 9, null); + Class testClass = compileAndLoadClass(CLASS_NAME); + int result = Reflect.on(testClass).call("main", "1").get(); + assertEquals(3, result); + // instrumentation cannot happen, so no span generated + assertEquals(0, tracer.spans.size()); + } + @Test public void invalidLineSimpleSpan() throws IOException, URISyntaxException { final String CLASS_NAME = "CapturedSnapshot01"; @@ -88,9 +104,7 @@ public void invalidLineSimpleSpan() throws IOException, URISyntaxException { int result = Reflect.on(testClass).call("main", "1").get(); assertEquals(3, result); assertEquals(0, tracer.spans.size()); - assertEquals(1, mockSink.getCurrentDiagnostics().size()); - assertEquals( - "No line info for range 4-10", mockSink.getCurrentDiagnostics().get(0).getMessage()); + verify(probeStatusSink).addError(eq(SPAN_ID), eq("No line info for range 4-10")); } @Test @@ -128,7 +142,14 @@ private MockTracer installSpanProbes(Configuration configuration) { Config config = mock(Config.class); when(config.isDebuggerEnabled()).thenReturn(true); when(config.isDebuggerClassFileDumpEnabled()).thenReturn(true); - currentTransformer = new DebuggerTransformer(config, configuration); + when(config.isDebuggerVerifyByteCode()).thenReturn(true); + when(config.getFinalDebuggerSnapshotUrl()) + .thenReturn("http://localhost:8126/debugger/v1/input"); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + probeStatusSink = mock(ProbeStatusSink.class); + currentTransformer = + new DebuggerTransformer( + config, configuration, null, new DebuggerSink(config, probeStatusSink)); instr.addTransformer(currentTransformer); mockSink = new MockSink(); DebuggerAgentHelper.injectSink(mockSink); @@ -185,20 +206,16 @@ public boolean isFinished() { } private static SpanProbe createSpan( - String id, String typeName, String methodName, String signature, String[] tags) { + ProbeId id, String typeName, String methodName, String signature, String[] tags) { return SpanProbe.builder() - .probeId(id, 0) + .probeId(id) .where(typeName, methodName, signature) .tags(tags) .build(); } private static SpanProbe createSpan( - String id, String sourceFile, int lineFrom, int lineTill, String[] tags) { - return SpanProbe.builder() - .probeId(id, 0) - .where(sourceFile, lineFrom, lineTill) - .tags(tags) - .build(); + ProbeId id, String sourceFile, int lineFrom, int lineTill, String[] tags) { + return SpanProbe.builder().probeId(id).where(sourceFile, lineFrom, lineTill).tags(tags).build(); } } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/DebuggerSinkTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/DebuggerSinkTest.java index c3792e2053b..61a40fa136f 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/DebuggerSinkTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/DebuggerSinkTest.java @@ -71,6 +71,7 @@ void setUp() { when(config.getEnv()).thenReturn("test"); when(config.getVersion()).thenReturn("foo"); when(config.getDebuggerUploadBatchSize()).thenReturn(1); + 
when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); EXPECTED_SNAPSHOT_TAGS = "^env:test,version:foo,debugger_version:\\d+\\.\\d+\\.\\d+(-SNAPSHOT)?~[0-9a-f]+,agent_version:null,host_name:" @@ -358,36 +359,9 @@ public void reconsiderFlushIntervalNoChange() { public void addSnapshotWithCorrelationIdsMethodProbe() throws URISyntaxException, IOException { DebuggerSink sink = new DebuggerSink(config, batchUploader); DebuggerAgentHelper.injectSerializer(new JsonSnapshotSerializer()); - CapturedContext entry = new CapturedContext(); - entry.addFields( - new CapturedValue[] { - CapturedValue.of("dd.trace_id", "java.lang.String", "123"), - CapturedValue.of("dd.span_id", "java.lang.String", "456"), - }); - Snapshot snapshot = createSnapshot(); - snapshot.setEntry(entry); - sink.addSnapshot(snapshot); - sink.flush(sink); - verify(batchUploader).upload(payloadCaptor.capture(), matches(EXPECTED_SNAPSHOT_TAGS)); - String strPayload = new String(payloadCaptor.getValue(), StandardCharsets.UTF_8); - System.out.println(strPayload); - JsonSnapshotSerializer.IntakeRequest intakeRequest = assertOneIntakeRequest(strPayload); - assertEquals("123", intakeRequest.getTraceId()); - assertEquals("456", intakeRequest.getSpanId()); - } - - @Test - public void addSnapshotWithCorrelationIdsLineProbe() throws URISyntaxException, IOException { - DebuggerSink sink = new DebuggerSink(config, batchUploader); - DebuggerAgentHelper.injectSerializer(new JsonSnapshotSerializer()); - CapturedContext line = new CapturedContext(); - line.addFields( - new CapturedValue[] { - CapturedValue.of("dd.trace_id", "java.lang.String", "123"), - CapturedValue.of("dd.span_id", "java.lang.String", "456"), - }); Snapshot snapshot = createSnapshot(); - snapshot.addLine(line, 25); + snapshot.setTraceId("123"); + snapshot.setSpanId("456"); sink.addSnapshot(snapshot); sink.flush(sink); verify(batchUploader).upload(payloadCaptor.capture(), matches(EXPECTED_SNAPSHOT_TAGS)); diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/SymbolSinkTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/SymbolSinkTest.java new file mode 100644 index 00000000000..d158780d92e --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/SymbolSinkTest.java @@ -0,0 +1,53 @@ +package com.datadog.debugger.sink; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datadog.debugger.symbol.Scope; +import com.datadog.debugger.symbol.ScopeType; +import com.datadog.debugger.uploader.BatchUploader; +import datadog.trace.api.Config; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.Test; + +class SymbolSinkTest { + + @Test + public void testFlush() { + SymbolUploaderMock symbolUploaderMock = new SymbolUploaderMock(); + Config config = mock(Config.class); + when(config.getServiceName()).thenReturn("service1"); + SymbolSink symbolSink = new SymbolSink(config, symbolUploaderMock); + symbolSink.addScope(Scope.builder(ScopeType.JAR, null, 0, 0).build()); + symbolSink.flush(); + assertEquals(2, symbolUploaderMock.multiPartContents.size()); + BatchUploader.MultiPartContent eventContent = symbolUploaderMock.multiPartContents.get(0); + assertEquals("event", eventContent.getPartName()); + assertEquals("event.json", eventContent.getFileName()); + String strEventContent = new String(eventContent.getContent()); + 
assertTrue(strEventContent.contains("\"ddsource\": \"dd_debugger\"")); + assertTrue(strEventContent.contains("\"service\": \"service1\"")); + BatchUploader.MultiPartContent symbolContent = symbolUploaderMock.multiPartContents.get(1); + assertEquals("file", symbolContent.getPartName()); + assertEquals("file.json", symbolContent.getFileName()); + assertEquals( + "{\"language\":\"JAVA\",\"scopes\":[{\"end_line\":0,\"scope_type\":\"JAR\",\"start_line\":0}],\"service\":\"service1\"}", + new String(symbolContent.getContent())); + } + + static class SymbolUploaderMock extends BatchUploader { + final List multiPartContents = new ArrayList<>(); + + public SymbolUploaderMock() { + super(Config.get(), "http://localhost"); + } + + @Override + public void uploadAsMultipart(String tags, MultiPartContent... parts) { + multiPartContents.addAll(Arrays.asList(parts)); + } + } +} diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/symbol/SymbolExtractionTransformerTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/symbol/SymbolExtractionTransformerTest.java new file mode 100644 index 00000000000..990efce48e6 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/symbol/SymbolExtractionTransformerTest.java @@ -0,0 +1,724 @@ +package com.datadog.debugger.symbol; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.Mockito.when; +import static utils.InstrumentationTestHelper.compileAndLoadClass; + +import com.datadog.debugger.sink.SymbolSink; +import datadog.trace.api.Config; +import java.io.IOException; +import java.lang.instrument.Instrumentation; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; +import net.bytebuddy.agent.ByteBuddyAgent; +import org.joor.Reflect; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +class SymbolExtractionTransformerTest { + private static final String SYMBOL_PACKAGE = "com.datadog.debugger.symbol."; + private static final String SYMBOL_PACKAGE_DIR = SYMBOL_PACKAGE.replace('.', '/'); + + private Instrumentation instr = ByteBuddyAgent.install(); + private Config config; + + @BeforeEach + public void setUp() { + config = Mockito.mock(Config.class); + when(config.getDebuggerSymbolIncludes()).thenReturn(SYMBOL_PACKAGE); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + when(config.getDebuggerSymbolFlushThreshold()).thenReturn(1); + } + + @Test + public void noIncludesFilterOutDatadogClass() throws IOException, URISyntaxException { + config = Mockito.mock(Config.class); + when(config.getFinalDebuggerSymDBUrl()).thenReturn("http://localhost:8126/symdb/v1/input"); + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction01"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + assertFalse( + symbolSinkMock.jarScopes.stream() + .flatMap(scope -> scope.getScopes().stream()) + .anyMatch(scope -> scope.getName().equals(CLASS_NAME))); + } + + @Test + public void symbolExtraction01() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + 
"SymbolExtraction01"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction01.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 2, 20, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 2, 2, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 4, 20, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 4); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 4, 20, SOURCE_FILE, 1, 2); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 4); + assertSymbol( + mainMethodLocalScope.getSymbols().get(1), + SymbolType.LOCAL, + "var3", + Integer.TYPE.getTypeName(), + 19); + Scope ifLine5Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(ifLine5Scope, ScopeType.LOCAL, null, 6, 17, SOURCE_FILE, 1, 1); + assertSymbol( + ifLine5Scope.getSymbols().get(0), SymbolType.LOCAL, "var2", Integer.TYPE.getTypeName(), 6); + Scope forLine7Scope = ifLine5Scope.getScopes().get(0); + assertScope(forLine7Scope, ScopeType.LOCAL, null, 7, 15, SOURCE_FILE, 1, 1); + assertSymbol( + forLine7Scope.getSymbols().get(0), SymbolType.LOCAL, "i", Integer.TYPE.getTypeName(), 7); + Scope forBodyLine7Scope = forLine7Scope.getScopes().get(0); + assertScope(forBodyLine7Scope, ScopeType.LOCAL, null, 8, 15, SOURCE_FILE, 1, 3); + assertSymbol( + forBodyLine7Scope.getSymbols().get(0), + SymbolType.LOCAL, + "foo", + Integer.TYPE.getTypeName(), + 8); + assertSymbol( + forBodyLine7Scope.getSymbols().get(1), + SymbolType.LOCAL, + "bar", + Integer.TYPE.getTypeName(), + 9); + assertSymbol( + forBodyLine7Scope.getSymbols().get(2), + SymbolType.LOCAL, + "j", + Integer.TYPE.getTypeName(), + 11); + Scope whileLine12 = forBodyLine7Scope.getScopes().get(0); + assertScope(whileLine12, ScopeType.LOCAL, null, 13, 14, SOURCE_FILE, 0, 1); + assertSymbol( + whileLine12.getSymbols().get(0), SymbolType.LOCAL, "var4", Integer.TYPE.getTypeName(), 13); + } + + @Test + public void symbolExtraction02() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction02"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction02.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 6, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 5, 6, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), 
SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 6, SOURCE_FILE, 0, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + String.class.getTypeName(), + 5); + } + + @Test + public void symbolExtraction03() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction03"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction03.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 4, 28, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 4, 4, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 6, 28, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 6); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 6, 28, SOURCE_FILE, 2, 2); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + String.class.getTypeName(), + 6); + assertSymbol( + mainMethodLocalScope.getSymbols().get(1), + SymbolType.LOCAL, + "var5", + String.class.getTypeName(), + 27); + Scope elseLine10Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(elseLine10Scope, ScopeType.LOCAL, null, 12, 24, SOURCE_FILE, 1, 4); + assertSymbol( + elseLine10Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var31", + String.class.getTypeName(), + 12); + assertSymbol( + elseLine10Scope.getSymbols().get(1), + SymbolType.LOCAL, + "var32", + String.class.getTypeName(), + 13); + assertSymbol( + elseLine10Scope.getSymbols().get(2), + SymbolType.LOCAL, + "var30", + String.class.getTypeName(), + 15); + assertSymbol( + elseLine10Scope.getSymbols().get(3), + SymbolType.LOCAL, + "var3", + String.class.getTypeName(), + 17); + Scope ifLine19Scope = elseLine10Scope.getScopes().get(0); + assertScope(ifLine19Scope, ScopeType.LOCAL, null, 20, 21, SOURCE_FILE, 0, 1); + assertSymbol( + ifLine19Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var4", + String.class.getTypeName(), + 20); + } + + @Test + public void symbolExtraction04() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction04"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction04.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 18, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, 
ScopeType.METHOD, "main", 5, 18, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 18, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + String.class.getTypeName(), + 5); + Scope forLine6Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(forLine6Scope, ScopeType.LOCAL, null, 6, 15, SOURCE_FILE, 1, 1); + assertSymbol( + forLine6Scope.getSymbols().get(0), SymbolType.LOCAL, "i", Integer.TYPE.getTypeName(), 6); + Scope forBodyLine6Scope = forLine6Scope.getScopes().get(0); + assertScope(forBodyLine6Scope, ScopeType.LOCAL, null, 7, 15, SOURCE_FILE, 1, 2); + assertSymbol( + forBodyLine6Scope.getSymbols().get(0), + SymbolType.LOCAL, + "j", + Integer.TYPE.getTypeName(), + 8); + assertSymbol( + forBodyLine6Scope.getSymbols().get(1), + SymbolType.LOCAL, + "var2", + String.class.getTypeName(), + 7); + Scope forBodyLine8Scope = forBodyLine6Scope.getScopes().get(0); + assertScope(forBodyLine8Scope, ScopeType.LOCAL, null, 9, 15, SOURCE_FILE, 1, 2); + assertSymbol( + forBodyLine8Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var3", + String.class.getTypeName(), + 9); + assertSymbol( + forBodyLine8Scope.getSymbols().get(1), + SymbolType.LOCAL, + "var5", + String.class.getTypeName(), + 14); + Scope forLine10Scope = forBodyLine8Scope.getScopes().get(0); + assertScope(forLine10Scope, ScopeType.LOCAL, null, 10, 12, SOURCE_FILE, 1, 1); + assertSymbol( + forLine10Scope.getSymbols().get(0), SymbolType.LOCAL, "k", Integer.TYPE.getTypeName(), 10); + Scope forBodyLine10Scope = forLine10Scope.getScopes().get(0); + assertScope(forBodyLine10Scope, ScopeType.LOCAL, null, 11, 12, SOURCE_FILE, 0, 1); + assertSymbol( + forBodyLine10Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var4", + String.class.getTypeName(), + 11); + } + + @Test + public void symbolExtraction05() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction05"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction05.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 15, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 5, 15, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 15, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "i", + Integer.TYPE.getTypeName(), + 5); + Scope whileLine6Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(whileLine6Scope, ScopeType.LOCAL, null, 7, 13, SOURCE_FILE, 1, 2); + assertSymbol( + whileLine6Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + 
Integer.TYPE.getTypeName(), + 7); + assertSymbol( + whileLine6Scope.getSymbols().get(1), SymbolType.LOCAL, "j", Integer.TYPE.getTypeName(), 8); + Scope whileLine9Scope = whileLine6Scope.getScopes().get(0); + assertScope(whileLine9Scope, ScopeType.LOCAL, null, 10, 11, SOURCE_FILE, 0, 1); + assertSymbol( + whileLine9Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var2", + Integer.TYPE.getTypeName(), + 10); + } + + @Test + public void symbolExtraction06() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction06"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction06.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 13, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 5, 13, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 13, SOURCE_FILE, 2, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 5); + Scope catchLine9Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(catchLine9Scope, ScopeType.LOCAL, null, 9, 11, SOURCE_FILE, 0, 2); + assertSymbol( + catchLine9Scope.getSymbols().get(0), + SymbolType.LOCAL, + "var3", + Integer.TYPE.getTypeName(), + 10); + assertSymbol( + catchLine9Scope.getSymbols().get(1), + SymbolType.LOCAL, + "rte", + RuntimeException.class.getTypeName(), + 9); + Scope tryLine6Scope = mainMethodLocalScope.getScopes().get(1); + assertScope(tryLine6Scope, ScopeType.LOCAL, null, 7, 8, SOURCE_FILE, 0, 1); + assertSymbol( + tryLine6Scope.getSymbols().get(0), SymbolType.LOCAL, "var2", Integer.TYPE.getTypeName(), 7); + } + + @Test + public void symbolExtraction07() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction07"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction07.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 10, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 5, 10, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 10, 
SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "i", + Integer.TYPE.getTypeName(), + 5); + Scope doLine6Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(doLine6Scope, ScopeType.LOCAL, null, 7, 8, SOURCE_FILE, 0, 1); + assertSymbol( + doLine6Scope.getSymbols().get(0), SymbolType.LOCAL, "j", Integer.TYPE.getTypeName(), 7); + } + + @Test + public void symbolExtraction08() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction08"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction08.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 11, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 5, 11, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 11, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 5); + Scope line6Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(line6Scope, ScopeType.LOCAL, null, 7, 9, SOURCE_FILE, 0, 2); + assertSymbol( + line6Scope.getSymbols().get(0), SymbolType.LOCAL, "var2", Integer.TYPE.getTypeName(), 7); + assertSymbol( + line6Scope.getSymbols().get(1), SymbolType.LOCAL, "var3", Integer.TYPE.getTypeName(), 8); + } + + @Test + public void symbolExtraction09() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction09"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction09.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 5, 23, SOURCE_FILE, 6, 2); + assertSymbol( + classScope.getSymbols().get(0), + SymbolType.STATIC_FIELD, + "staticIntField", + Integer.TYPE.getTypeName(), + 0); + assertSymbol( + classScope.getSymbols().get(1), + SymbolType.FIELD, + "intField", + Integer.TYPE.getTypeName(), + 0); + assertScope( + classScope.getScopes().get(0), ScopeType.METHOD, "", 5, 17, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 8, 14, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 8); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 8, 14, SOURCE_FILE, 0, 3); + assertSymbol( + 
mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "outside", + Integer.TYPE.getTypeName(), + 8); + assertSymbol( + mainMethodLocalScope.getSymbols().get(1), + SymbolType.LOCAL, + "outside2", + Integer.TYPE.getTypeName(), + 9); + assertSymbol( + mainMethodLocalScope.getSymbols().get(2), + SymbolType.LOCAL, + "lambda", + Supplier.class.getTypeName(), + 10); + Scope processMethodScope = classScope.getScopes().get(2); + assertScope(processMethodScope, ScopeType.METHOD, "process", 19, 23, SOURCE_FILE, 1, 0); + Scope processMethodLocalScope = processMethodScope.getScopes().get(0); + assertScope(processMethodLocalScope, ScopeType.LOCAL, null, 19, 23, SOURCE_FILE, 0, 1); + assertSymbol( + processMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "supplier", + Supplier.class.getTypeName(), + 19); + Scope supplierClosureScope = classScope.getScopes().get(3); + assertScope( + supplierClosureScope, ScopeType.CLOSURE, "lambda$process$1", 20, 21, SOURCE_FILE, 1, 0); + Scope supplierClosureLocalScope = supplierClosureScope.getScopes().get(0); + assertScope(supplierClosureLocalScope, ScopeType.LOCAL, null, 20, 21, SOURCE_FILE, 0, 1); + assertSymbol( + supplierClosureLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 20); + Scope lambdaClosureScope = classScope.getScopes().get(4); + assertScope(lambdaClosureScope, ScopeType.CLOSURE, "lambda$main$0", 11, 12, SOURCE_FILE, 1, 1); + assertSymbol( + lambdaClosureScope.getSymbols().get(0), + SymbolType.ARG, + "outside", + Integer.TYPE.getTypeName(), + 11); + Scope lambdaMethodLocalScope = lambdaClosureScope.getScopes().get(0); + assertScope(lambdaMethodLocalScope, ScopeType.LOCAL, null, 11, 12, SOURCE_FILE, 0, 1); + assertSymbol( + lambdaMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 11); + Scope clinitMethodScope = classScope.getScopes().get(5); + assertScope(clinitMethodScope, ScopeType.METHOD, "", 6, 6, SOURCE_FILE, 0, 0); + } + + @Test + public void symbolExtraction10() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction10"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction10.java"; + when(config.getDebuggerSymbolFlushThreshold()).thenReturn(2); + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", "1").get(); + assertEquals(2, symbolSinkMock.jarScopes.get(0).getScopes().size()); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 6, SOURCE_FILE, 2, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 3, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 5, 6, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", String.class.getTypeName(), 5); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 5, 6, SOURCE_FILE, 0, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "winner", + "com.datadog.debugger.symbol.SymbolExtraction10$Inner", + 5); + Scope innerClassScope = 
symbolSinkMock.jarScopes.get(0).getScopes().get(1); + assertScope(innerClassScope, ScopeType.CLASS, CLASS_NAME + "$Inner", 9, 13, SOURCE_FILE, 2, 1); + assertSymbol( + innerClassScope.getSymbols().get(0), + SymbolType.FIELD, + "field1", + Integer.TYPE.getTypeName(), + 0); + assertScope( + innerClassScope.getScopes().get(0), ScopeType.METHOD, "", 9, 10, SOURCE_FILE, 0, 0); + Scope addToMethod = innerClassScope.getScopes().get(1); + assertScope(addToMethod, ScopeType.METHOD, "addTo", 12, 13, SOURCE_FILE, 1, 1); + assertSymbol( + addToMethod.getSymbols().get(0), SymbolType.ARG, "arg", Integer.TYPE.getTypeName(), 12); + Scope addToMethodLocalScope = addToMethod.getScopes().get(0); + assertScope(addToMethodLocalScope, ScopeType.LOCAL, null, 12, 13, SOURCE_FILE, 0, 1); + assertSymbol( + addToMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 12); + } + + @Test + public void symbolExtraction11() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction11"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction11.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", 1).get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 3, 11, SOURCE_FILE, 2, 1); + assertSymbol( + classScope.getSymbols().get(0), SymbolType.FIELD, "field1", Integer.TYPE.getTypeName(), 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 3, 4, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 6, 11, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", Integer.TYPE.getTypeName(), 6); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 6, 11, SOURCE_FILE, 1, 1); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "var1", + Integer.TYPE.getTypeName(), + 6); + Scope ifLine7Scope = mainMethodLocalScope.getScopes().get(0); + assertScope(ifLine7Scope, ScopeType.LOCAL, null, 8, 9, SOURCE_FILE, 0, 1); + assertSymbol( + ifLine7Scope.getSymbols().get(0), SymbolType.LOCAL, "var2", Integer.TYPE.getTypeName(), 8); + } + + @Test + public void symbolExtraction12() throws IOException, URISyntaxException { + final String CLASS_NAME = SYMBOL_PACKAGE + "SymbolExtraction12"; + final String SOURCE_FILE = SYMBOL_PACKAGE_DIR + "SymbolExtraction12.java"; + SymbolSinkMock symbolSinkMock = new SymbolSinkMock(config); + SymbolExtractionTransformer transformer = + new SymbolExtractionTransformer(symbolSinkMock, config); + instr.addTransformer(transformer); + Class testClass = compileAndLoadClass(CLASS_NAME); + Reflect.on(testClass).call("main", 1).get(); + Scope classScope = symbolSinkMock.jarScopes.get(0).getScopes().get(0); + assertScope(classScope, ScopeType.CLASS, CLASS_NAME, 6, 20, SOURCE_FILE, 7, 0); + assertScope(classScope.getScopes().get(0), ScopeType.METHOD, "", 6, 6, SOURCE_FILE, 0, 0); + Scope mainMethodScope = classScope.getScopes().get(1); + assertScope(mainMethodScope, ScopeType.METHOD, "main", 8, 13, SOURCE_FILE, 1, 1); + assertSymbol( + 
mainMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", Integer.TYPE.getTypeName(), 8); + Scope mainMethodLocalScope = mainMethodScope.getScopes().get(0); + assertScope(mainMethodLocalScope, ScopeType.LOCAL, null, 8, 13, SOURCE_FILE, 0, 2); + assertSymbol( + mainMethodLocalScope.getSymbols().get(0), + SymbolType.LOCAL, + "list", + List.class.getTypeName(), + 8); + assertSymbol( + mainMethodLocalScope.getSymbols().get(1), + SymbolType.LOCAL, + "sum", + Integer.TYPE.getTypeName(), + 12); + Scope fooMethodScope = classScope.getScopes().get(2); + assertScope(fooMethodScope, ScopeType.METHOD, "foo", 17, 20, SOURCE_FILE, 0, 1); + assertSymbol( + fooMethodScope.getSymbols().get(0), SymbolType.ARG, "arg", Integer.TYPE.getTypeName(), 17); + Scope lambdaFoo3MethodScope = classScope.getScopes().get(3); + assertScope( + lambdaFoo3MethodScope, ScopeType.CLOSURE, "lambda$foo$3", 19, 19, SOURCE_FILE, 0, 1); + assertSymbol( + lambdaFoo3MethodScope.getSymbols().get(0), + SymbolType.ARG, + "x", + Integer.TYPE.getTypeName(), + 19); + Scope lambdaFoo2MethodScope = classScope.getScopes().get(4); + assertScope( + lambdaFoo2MethodScope, ScopeType.CLOSURE, "lambda$foo$2", 19, 19, SOURCE_FILE, 0, 1); + assertSymbol( + lambdaFoo2MethodScope.getSymbols().get(0), + SymbolType.ARG, + "x", + Integer.class.getTypeName(), + 19); + Scope lambdaMain1MethodScope = classScope.getScopes().get(5); + assertScope( + lambdaMain1MethodScope, ScopeType.CLOSURE, "lambda$main$1", 11, 11, SOURCE_FILE, 0, 1); + assertSymbol( + lambdaMain1MethodScope.getSymbols().get(0), + SymbolType.ARG, + "x", + Integer.TYPE.getTypeName(), + 11); + Scope lambdaMain0MethodScope = classScope.getScopes().get(6); + assertScope( + lambdaMain0MethodScope, ScopeType.CLOSURE, "lambda$main$0", 11, 11, SOURCE_FILE, 0, 1); + assertSymbol( + lambdaMain0MethodScope.getSymbols().get(0), + SymbolType.ARG, + "x", + Integer.class.getTypeName(), + 11); + } + + private static void assertScope( + Scope scope, + ScopeType scopeType, + String name, + int startLine, + int endLine, + String sourceFile, + int nbScopes, + int nbSymbols) { + assertEquals(scopeType, scope.getScopeType()); + assertEquals(name, scope.getName()); + assertEquals(startLine, scope.getStartLine()); + assertEquals(endLine, scope.getEndLine()); + assertEquals(sourceFile, scope.getSourceFile()); + assertEquals(nbScopes, scope.getScopes().size()); + assertEquals(nbSymbols, scope.getSymbols().size()); + } + + private void assertSymbol( + Symbol symbol, SymbolType symbolType, String name, String type, int line) { + assertEquals(symbolType, symbol.getSymbolType()); + assertEquals(name, symbol.getName()); + assertEquals(type, symbol.getType()); + assertEquals(line, symbol.getLine()); + } + + static class SymbolSinkMock extends SymbolSink { + final List jarScopes = new ArrayList<>(); + + public SymbolSinkMock(Config config) { + super(config); + } + + @Override + public boolean addScope(Scope jarScope) { + return jarScopes.add(jarScope); + } + } +} diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/uploader/BatchUploaderTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/uploader/BatchUploaderTest.java index c2fe8cf06b0..99a9e7db46e 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/uploader/BatchUploaderTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/uploader/BatchUploaderTest.java @@ -1,5 +1,6 @@ package com.datadog.debugger.uploader; +import static 
com.datadog.debugger.uploader.BatchUploader.HEADER_DD_API_KEY; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; @@ -13,10 +14,8 @@ import datadog.trace.relocate.api.RatelimitedLogger; import java.io.IOException; import java.net.ConnectException; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.Duration; -import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import okhttp3.ConnectionSpec; @@ -41,6 +40,7 @@ public class BatchUploaderTest { private final Duration REQUEST_TIMEOUT = Duration.ofSeconds(10); private final Duration REQUEST_IO_OPERATION_TIMEOUT = Duration.ofSeconds(5); private final Duration FOREVER_REQUEST_TIMEOUT = Duration.ofSeconds(1000); + private static final String API_KEY_VALUE = "testkey"; @Mock private Config config; @Mock private RatelimitedLogger ratelimitedLogger; @@ -55,10 +55,9 @@ public void setup() throws IOException { server.start(); url = server.url(URL_PATH); - when(config.getFinalDebuggerSnapshotUrl()).thenReturn(server.url(URL_PATH).toString()); when(config.getDebuggerUploadTimeout()).thenReturn((int) REQUEST_TIMEOUT.getSeconds()); - uploader = new BatchUploader(config, ratelimitedLogger); + uploader = new BatchUploader(config, url.toString(), ratelimitedLogger); } @AfterEach @@ -73,9 +72,7 @@ public void tearDown() throws IOException { @Test void testOkHttpClientForcesCleartextConnspecWhenNotUsingTLS() { - when(config.getFinalDebuggerSnapshotUrl()).thenReturn("http://example.com"); - - uploader = new BatchUploader(config); + uploader = new BatchUploader(config, "http://example.com"); final List connectionSpecs = uploader.getClient().connectionSpecs(); assertEquals(connectionSpecs.size(), 1); @@ -84,9 +81,7 @@ void testOkHttpClientForcesCleartextConnspecWhenNotUsingTLS() { @Test void testOkHttpClientUsesDefaultConnspecsOverTLS() { - when(config.getFinalDebuggerSnapshotUrl()).thenReturn("https://example.com"); - - uploader = new BatchUploader(config); + uploader = new BatchUploader(config, "https://example.com"); final List connectionSpecs = uploader.getClient().connectionSpecs(); assertEquals(connectionSpecs.size(), 2); @@ -163,7 +158,7 @@ public void testTooManyRequests() throws IOException, InterruptedException { // We need to make sure that initial requests that fill up the queue hang to the duration of the // test. So we specify insanely large timeout here. when(config.getDebuggerUploadTimeout()).thenReturn((int) FOREVER_REQUEST_TIMEOUT.getSeconds()); - uploader = new BatchUploader(config); + uploader = new BatchUploader(config, url.toString()); // We have to block all parallel requests to make sure queue is kept full for (int i = 0; i < BatchUploader.MAX_RUNNING_REQUESTS; i++) { @@ -178,13 +173,11 @@ public void testTooManyRequests() throws IOException, InterruptedException { uploader.upload(SNAPSHOT_BUFFER); } - final List hangingRequests = new ArrayList<>(); // We schedule one additional request to check case when request would be rejected immediately // rather than added to the queue. 
for (int i = 0; i < BatchUploader.MAX_ENQUEUED_REQUESTS + 1; i++) { uploader.upload(SNAPSHOT_BUFFER); } - // Make sure all expected requests happened for (int i = 0; i < BatchUploader.MAX_RUNNING_REQUESTS; i++) { assertNotNull(server.takeRequest(5, TimeUnit.SECONDS)); @@ -205,15 +198,15 @@ public void testShutdown() throws IOException, InterruptedException { @Test public void testEmptyUrl() { - when(config.getFinalDebuggerSnapshotUrl()).thenReturn(""); - Assertions.assertThrows(IllegalArgumentException.class, () -> new BatchUploader(config)); + Assertions.assertThrows(IllegalArgumentException.class, () -> new BatchUploader(config, "")); } @Test public void testNoContainerId() throws InterruptedException { // we don't explicitly specify a container ID server.enqueue(new MockResponse().setResponseCode(200)); - BatchUploader uploaderWithNoContainerId = new BatchUploader(config, ratelimitedLogger, null); + BatchUploader uploaderWithNoContainerId = + new BatchUploader(config, url.toString(), ratelimitedLogger, null); uploaderWithNoContainerId.upload(SNAPSHOT_BUFFER); uploaderWithNoContainerId.shutdown(); @@ -227,11 +220,24 @@ public void testContainerIdHeader() throws InterruptedException { server.enqueue(new MockResponse().setResponseCode(200)); BatchUploader uploaderWithContainerId = - new BatchUploader(config, ratelimitedLogger, "testContainerId"); + new BatchUploader(config, url.toString(), ratelimitedLogger, "testContainerId"); uploaderWithContainerId.upload(SNAPSHOT_BUFFER); uploaderWithContainerId.shutdown(); RecordedRequest request = server.takeRequest(100, TimeUnit.MILLISECONDS); assertEquals("testContainerId", request.getHeader("Datadog-Container-ID")); } + + @Test + public void testApiKey() throws InterruptedException { + server.enqueue(new MockResponse().setResponseCode(200)); + when(config.getApiKey()).thenReturn(API_KEY_VALUE); + + BatchUploader uploaderWithApiKey = new BatchUploader(config, url.toString(), ratelimitedLogger); + uploaderWithApiKey.upload(SNAPSHOT_BUFFER); + uploaderWithApiKey.shutdown(); + + RecordedRequest request = server.takeRequest(100, TimeUnit.MILLISECONDS); + assertEquals(API_KEY_VALUE, request.getHeader(HEADER_DD_API_KEY)); + } } diff --git a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/util/StringTokenWriterTest.java b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/util/StringTokenWriterTest.java index 1113191d087..71dc9950db2 100644 --- a/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/util/StringTokenWriterTest.java +++ b/dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/util/StringTokenWriterTest.java @@ -99,20 +99,15 @@ public void maps() throws Exception { } @Test - public void arrayListSizeThrows() throws Exception { - class MyArrayList extends ArrayList { - @Override - public int size() { - throw new UnsupportedOperationException("size"); - } - } + public void collectionUnknown() throws Exception { + class MyArrayList extends ArrayList {} assertEquals( - "[](Error: java.lang.UnsupportedOperationException: size)", + "[](Error: java.lang.RuntimeException: Unsupported Collection type: com.datadog.debugger.util.StringTokenWriterTest$1MyArrayList)", serializeValue(new MyArrayList<>(), Limits.DEFAULT)); } @Test - public void entrySetThrows() throws Exception { + public void mapUnknown() throws Exception { class MyMap extends HashMap { @Override public Set> entrySet() { @@ -120,7 +115,7 @@ public Set> entrySet() { } } assertEquals( - "{}(Error: java.lang.UnsupportedOperationException: 
entrySet)", + "{}(Error: java.lang.RuntimeException: Unsupported Map type: com.datadog.debugger.util.StringTokenWriterTest$1MyMap)", serializeValue(new MyMap(), Limits.DEFAULT)); } diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot20.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot20.java index 4ceff843986..07e5fa48828 100644 --- a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot20.java +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot20.java @@ -34,6 +34,7 @@ public static int main(String arg) { } private int process(String arg) { - return intField; + int intLocal = intField + 42; + return intLocal; } } diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot24.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot24.java index 8bfa6e43b3b..3bb7c2aecb7 100644 --- a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot24.java +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot24.java @@ -1,9 +1,6 @@ package com.datadog.debugger; import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; public class CapturedSnapshot24 { diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot26.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot26.java new file mode 100644 index 00000000000..f75d7af6909 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot26.java @@ -0,0 +1,53 @@ +package com.datadog.debugger; + +import java.util.HashMap; + +public class CapturedSnapshot26 { + + private Holder holder = new Holder<>(this); + private HolderWithException holderWithException = new HolderWithException<>(this); + + public static int main(String arg) { + CapturedSnapshot26 cs26 = new CapturedSnapshot26(); + if ("exception".equals(arg)) { + return cs26.doItException(arg); + } + return cs26.doit(arg); + } + + private int doit(String arg) { + holder.put("foo", "bar"); + return holder.size(); + } + + private int doItException(String arg) { + holderWithException.put("foo", "bar"); + return holderWithException.size(); + } + + static class Holder extends HashMap { + private final CapturedSnapshot26 capturedSnapshot26; + + public Holder(CapturedSnapshot26 capturedSnapshot26) { + this.capturedSnapshot26 = capturedSnapshot26; + } + + @Override + public int size() { + return super.size(); + } + } + + static class HolderWithException extends HashMap { + private final CapturedSnapshot26 capturedSnapshot26; + + public HolderWithException(CapturedSnapshot26 capturedSnapshot26) { + this.capturedSnapshot26 = capturedSnapshot26; + } + + @Override + public int size() { + throw new UnsupportedOperationException("not supported"); + } + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot27.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot27.java new file mode 100644 index 00000000000..df2c9f41a0c --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot27.java @@ -0,0 +1,40 @@ +package com.datadog.debugger; + +import java.util.HashMap; +import java.util.Map; + +public class CapturedSnapshot27 { + 
private HashMap strMap = new HashMap<>(); + private Map credMap = new HashMap<>(); + { + strMap.put("foo1", "bar1"); + strMap.put("foo3", "bar3"); + credMap.put("dave", new Creds("dave", "secret456")); + } + + private String password; + private Creds creds; + + private int doit(String arg) { + creds = new Creds("john", arg); + password = arg; + String secret = arg; + strMap.put("password", arg); + return 42; + } + + public static int main(String arg) { + CapturedSnapshot27 cs27 = new CapturedSnapshot27(); + return cs27.doit(arg); + } + + static class Creds { + private String user; + private String secretCode; + + public Creds(String user, String secretCode) { + this.user = user; + this.secretCode = secretCode; + } + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot28.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot28.java new file mode 100644 index 00000000000..160472ecf92 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/CapturedSnapshot28.java @@ -0,0 +1,54 @@ +package com.datadog.debugger; + +import datadog.trace.agent.tooling.TracerInstaller; +import datadog.trace.bootstrap.instrumentation.api.AgentScope; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import datadog.trace.bootstrap.instrumentation.api.ScopeSource; +import datadog.trace.core.CoreTracer; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class CapturedSnapshot28 { + private String password; + private Creds creds; + private final Map strMap = new HashMap<>(); + private final Map credMap = new HashMap<>(); + { + strMap.put("foo1", "bar1"); + strMap.put("foo2", "bar2"); + strMap.put("foo3", "bar3"); + credMap.put("dave", new Creds("dave", "secret456")); + } + + public static int main(String arg) { + AgentTracer.TracerAPI tracerAPI = AgentTracer.get(); + AgentSpan span = tracerAPI.buildSpan("process").start(); + try (AgentScope scope = tracerAPI.activateSpan(span, ScopeSource.MANUAL)) { + return new CapturedSnapshot28().process(arg); + } finally { + span.finish(); + } + } + + private int process(String arg) { + creds = new Creds("john", arg); + password = arg; + String secret = arg; + strMap.put("password", arg); + return 42; + } + + static class Creds { + private String user; + private String secretCode; + + public Creds(String user, String secretCode) { + this.user = user; + this.secretCode = secretCode; + } + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/classfiles/JavacBug.class b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/classfiles/JavacBug.class new file mode 100644 index 00000000000..58c4aa2d0e5 Binary files /dev/null and b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/classfiles/JavacBug.class differ diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction01.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction01.java new file mode 100644 index 00000000000..aea27733512 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction01.java @@ -0,0 +1,22 @@ +package com.datadog.debugger.symbol; +public class SymbolExtraction01 { + public static int main(String arg) { + int var1 = 1; + if (Integer.parseInt(arg) == 
2) { + int var2 = 2; + for (int i = 0; i <= 9; i++) { + int foo = 13; + int bar = 13; + System.out.println(i + foo + bar); + int j = 0; + while (j < 10) { + int var4 = 1; + j++; + } + } + return var2; + } + int var3 = 3; + return var1 + var3; + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction02.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction02.java new file mode 100644 index 00000000000..2b0396c23a5 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction02.java @@ -0,0 +1,8 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction02 { + public static int main(String arg) { + String var1 = "var1"; + return var1.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction03.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction03.java new file mode 100644 index 00000000000..bc2be93c149 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction03.java @@ -0,0 +1,30 @@ + +package com.datadog.debugger.symbol; + +public class SymbolExtraction03 { + public static int main(String arg) { + String var1 = "var1"; + if (arg.equals("foo")) { + String var2 = "var2"; + System.out.println(var2); + } else { + System.out.println(var1); + String var31 = "var31"; + String var32 = "var32"; + System.out.println(var1); + String var30 = "var30"; + System.out.println(var1); + String var3 = "var3"; + System.out.println(var3); + if (arg.equals(var3)) { + String var4 = "var4"; + System.out.println(var4); + } + if (arg.equals(var1)) { + return var3.length(); + } + } + String var5 = "var5"; + return var1.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction04.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction04.java new file mode 100644 index 00000000000..f667317cb92 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction04.java @@ -0,0 +1,20 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction04 { + public static int main(String arg) { + String var1 = "var1"; + for (int i = 0; i < 10; i++) { + String var2 = "var2"; + for (int j = 0; j < 10; j++) { + String var3 = "var3"; + for (int k = 0; k < 10; k++) { + String var4 = "var4"; + System.out.println("var4 = " + var4); + } + String var5 = "var5"; + System.out.println("var5 = " + var5); + } + } + return var1.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction05.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction05.java new file mode 100644 index 00000000000..327adb10431 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction05.java @@ -0,0 +1,17 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction05 { + public static int main(String arg) { + int i = 0; + while (i < 10) { + int var1 = 10; + int j = 0; + while (j < 10) { + int var2 = 1; + j++; + } + i++; + } + return arg.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction06.java 
b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction06.java new file mode 100644 index 00000000000..4eeb3173c81 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction06.java @@ -0,0 +1,15 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction06 { + public static int main(String arg) { + int var1 = 1; + try { + int var2 = 2; + throw new RuntimeException("" + var1); + } catch (RuntimeException rte) { + int var3 = 3; + System.out.println("rte = " + rte.getMessage() + var3); + } + return arg.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction07.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction07.java new file mode 100644 index 00000000000..6b3b6f04af1 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction07.java @@ -0,0 +1,12 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction07 { + public static int main(String arg) { + int i = 10; + do { + int j = i + 12; + i--; + } while (i > 0); + return arg.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction08.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction08.java new file mode 100644 index 00000000000..a06a4561a64 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction08.java @@ -0,0 +1,13 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction08 { + public static int main(String arg) { + int var1 = 1; + { + int var2 = 2; + int var3 = 3; + int var4 = var2 + var3; // var4 is not in the LocalVariableTable because last statement of the scope + } + return arg.length(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction09.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction09.java new file mode 100644 index 00000000000..fac032d858c --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction09.java @@ -0,0 +1,25 @@ +package com.datadog.debugger.symbol; + +import java.util.function.Supplier; + +public class SymbolExtraction09 { + static int staticIntField = 42; + public static int main(String arg) { + int outside = 12; + int outside2 = 1337; + Supplier lambda = () -> { + int var1 = 1; + return var1 + outside + staticIntField; + }; + return lambda.get(); + } + + int intField = 42; + public int process() { + Supplier supplier = () -> { + int var1 = 1; + return var1 + intField; + }; + return supplier.get(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction10.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction10.java new file mode 100644 index 00000000000..b1d6f28bea7 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction10.java @@ -0,0 +1,16 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction10 { + public static int main(String arg) { + Inner winner = new Inner(); + return winner.addTo(12); + } + + static class Inner { + private final int field1 = 1; + public int addTo(int arg) { + int var1 = 2; + return var1 
+ arg; + } + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction11.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction11.java new file mode 100644 index 00000000000..b4ba9ccd520 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction11.java @@ -0,0 +1,13 @@ +package com.datadog.debugger.symbol; + +public class SymbolExtraction11 { + private final int field1 = 1; + public static int main(int arg) { + int var1 = 1; + if (arg == 42) { + int var2 = 2; + return var2; + } + return var1 + arg; + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction12.java b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction12.java new file mode 100644 index 00000000000..330fbb1ae81 --- /dev/null +++ b/dd-java-agent/agent-debugger/src/test/resources/com/datadog/debugger/symbol/SymbolExtraction12.java @@ -0,0 +1,22 @@ +package com.datadog.debugger.symbol; + +import java.util.Arrays; +import java.util.List; + +public class SymbolExtraction12 { + public static int main(int arg) { + List list = Arrays.asList(1, 2, 3); + int sum = list + .stream() + .mapToInt(x -> x + 1).map(x -> x - 12) + .sum(); + return sum; + } + + public static int foo(int arg) { + return Arrays.asList(1, 2, 3, 4) + .stream() + .mapToInt(x -> x + 1).map(x -> x - 12) + .sum(); + } +} diff --git a/dd-java-agent/agent-debugger/src/test/resources/test_log_probe.json b/dd-java-agent/agent-debugger/src/test/resources/test_log_probe.json index c52e5031e94..fc840be8e9f 100644 --- a/dd-java-agent/agent-debugger/src/test/resources/test_log_probe.json +++ b/dd-java-agent/agent-debugger/src/test/resources/test_log_probe.json @@ -18,7 +18,7 @@ "typeName": "VetController", "methodName": "showVetList" }, - "template": "this is a log line customized! uuid={uuid} result={result} garbageStart={garbageStart}", + "template": "this is a log line customized! uuid={uuid} result={result} garbageStart={garbageStart} contain={contains(arg, 'foo')}", "segments": [ { "str": "this is a log line customized! 
uuid=" @@ -35,6 +35,11 @@ }, { "dsl": "garbageStart", "json": {"ref": "garbageStart"} + }, { + "str": " contain=" + }, { + "dsl": "contains(arg, 'foo')", + "json": {"contains": [{"ref": "arg"}, "foo"]} } ] }] diff --git a/dd-java-agent/agent-iast/build.gradle b/dd-java-agent/agent-iast/build.gradle index 4dfb4d9e3f0..068bbf5b61d 100644 --- a/dd-java-agent/agent-iast/build.gradle +++ b/dd-java-agent/agent-iast/build.gradle @@ -1,12 +1,46 @@ +import net.ltgt.gradle.errorprone.CheckSeverity + plugins { id 'com.github.johnrengelman.shadow' id 'me.champeau.jmh' id 'java-test-fixtures' + id 'com.google.protobuf' version '0.8.18' + id 'net.ltgt.errorprone' version '3.1.0' } apply from: "$rootDir/gradle/java.gradle" apply from: "$rootDir/gradle/version.gradle" +java { + toolchain { + languageVersion.set(JavaLanguageVersion.of(11)) + } + sourceCompatibility = 1.8 + targetCompatibility = 1.8 +} + +tasks.withType(AbstractCompile).configureEach { + // ensure no APIs beyond JDK8 are used + options.compilerArgs.addAll(['--release', '8']) +} + +// First version with Mac M1 support +def grpcVersion = '1.42.2' +protobuf { + protoc { + // Download compiler rather than using locally installed version: + // First version with Mac M1 support + artifact = 'com.google.protobuf:protoc:3.17.3' + } + plugins { + // First version with aarch support + grpc { artifact = "io.grpc:protoc-gen-grpc-java:${grpcVersion}" } + } + generateProtoTasks { + all()*.plugins { grpc {} } + } +} + dependencies { api deps.slf4j @@ -23,11 +57,18 @@ dependencies { testImplementation('org.skyscreamer:jsonassert:1.5.1') testImplementation('org.codehaus.groovy:groovy-yaml:3.0.17') + testImplementation group: 'io.grpc', name: 'grpc-core', version: grpcVersion + testImplementation group: 'io.grpc', name: 'grpc-protobuf', version: grpcVersion + jmh project(':utils:test-utils') jmh project(':dd-trace-core') jmh project(':dd-java-agent:agent-builder') jmh project(':dd-java-agent:instrumentation:iast-instrumenter') jmh project(':dd-java-agent:instrumentation:java-lang') + + compileOnly('org.jetbrains:annotations:24.0.0') + errorprone('com.uber.nullaway:nullaway:0.10.15') + errorprone('com.google.errorprone:error_prone_core:2.23.0') } shadowJar { @@ -59,7 +100,6 @@ ext { tasks.withType(Test).configureEach { jvmArgs += ['-Ddd.iast.enabled=true'] } -def rootDir = project.rootDir spotless { java { target 'src/**/*.java' @@ -75,3 +115,24 @@ pitest { targetClasses = ['com.datadog.iast.*'] jvmArgs = ['-Ddd.iast.enabled=true'] } + +sourceSets { + test { + java { + srcDirs += ["$buildDir/generated/source/proto/test/java"] + } + } +} + +tasks.withType(JavaCompile).configureEach { + if (name == 'compileJava') { + options.errorprone { + check("NullAway", CheckSeverity.ERROR) + option("NullAway:AnnotatedPackages", "com.datadog.iast") + disableAllWarnings = true // only errors for now + } + } else { + // disable null away for test and jmh + options.errorprone.enabled = false + } +} diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderAppendBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderAppendBenchmark.java index babe608c0f3..e4bb2ee9f46 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderAppendBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderAppendBenchmark.java @@ -1,6 +1,6 @@ package com.datadog.iast.propagation; -import static com.datadog.iast.model.Range.NOT_MARKED; +import 
static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderBatchBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderBatchBenchmark.java index 1d5f61a43bd..8bb5d4adb20 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderBatchBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderBatchBenchmark.java @@ -1,5 +1,6 @@ package com.datadog.iast.propagation; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import static java.util.concurrent.TimeUnit.MICROSECONDS; import com.datadog.iast.IastRequestContext; @@ -34,8 +35,7 @@ protected StringBuilderBatchBenchmark.Context initializeContext() { final String value; if (current < limit) { value = - tainted( - context, UUID.randomUUID().toString(), new Range(3, 6, source(), Range.NOT_MARKED)); + tainted(context, UUID.randomUUID().toString(), new Range(3, 6, source(), NOT_MARKED)); } else { value = notTainted(UUID.randomUUID().toString()); } diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderInitBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderInitBenchmark.java index a3c832355dc..ec11b927ff2 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderInitBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderInitBenchmark.java @@ -1,5 +1,7 @@ package com.datadog.iast.propagation; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; + import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; import datadog.trace.instrumentation.java.lang.StringBuilderCallSite; @@ -14,7 +16,7 @@ protected Context initializeContext() { final IastRequestContext context = new IastRequestContext(); final String notTainted = notTainted("I am not a tainted string"); final String tainted = - tainted(context, "I am a tainted string", new Range(3, 6, source(), Range.NOT_MARKED)); + tainted(context, "I am a tainted string", new Range(3, 6, source(), NOT_MARKED)); return new Context(context, notTainted, tainted); } diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderToStringBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderToStringBenchmark.java index 707e9b04611..1a8468610a4 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderToStringBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringBuilderToStringBenchmark.java @@ -1,5 +1,7 @@ package com.datadog.iast.propagation; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; + import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; import datadog.trace.instrumentation.java.lang.StringBuilderCallSite; @@ -18,7 +20,7 @@ protected Context initializeContext() { tainted( context, new StringBuilder("I am a tainted string builder"), - new Range(5, 7, source(), Range.NOT_MARKED)); + new Range(5, 7, source(), NOT_MARKED)); return new Context(context, notTaintedBuilder, taintedBuilder); } diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatBenchmark.java 
b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatBenchmark.java index 4dded1b999e..0858263ff00 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatBenchmark.java @@ -1,5 +1,7 @@ package com.datadog.iast.propagation; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; + import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; import datadog.trace.instrumentation.java.lang.StringCallSite; @@ -13,7 +15,7 @@ protected StringConcatBenchmark.Context initializeContext() { final IastRequestContext context = new IastRequestContext(); final String notTainted = notTainted("I am not a tainted string"); final String tainted = - tainted(context, "I am a tainted string", new Range(3, 5, source(), Range.NOT_MARKED)); + tainted(context, "I am a tainted string", new Range(3, 5, source(), NOT_MARKED)); return new StringConcatBenchmark.Context(context, notTainted, tainted); } diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBatchBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBatchBenchmark.java index 42e1741f361..59f0353ec51 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBatchBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBatchBenchmark.java @@ -1,5 +1,6 @@ package com.datadog.iast.propagation; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import static java.util.concurrent.TimeUnit.MICROSECONDS; import com.datadog.iast.IastRequestContext; @@ -54,7 +55,7 @@ protected StringConcatFactoryBatchBenchmark.Context initializeContext() { double current = i / (double) stringCount; final String value; if (current < limit) { - value = tainted(context, "Yep, tainted", new Range(3, 5, source(), Range.NOT_MARKED)); + value = tainted(context, "Yep, tainted", new Range(3, 5, source(), NOT_MARKED)); } else { value = notTainted("Nop, tainted"); } diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBenchmark.java index 6f99576fa56..9008bea3ad4 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringConcatFactoryBenchmark.java @@ -1,5 +1,7 @@ package com.datadog.iast.propagation; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; + import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; import datadog.trace.api.iast.InstrumentationBridge; @@ -13,8 +15,7 @@ public class StringConcatFactoryBenchmark protected StringConcatFactoryBenchmark.Context initializeContext() { final IastRequestContext context = new IastRequestContext(); final String notTainted = notTainted("Nop, tainted"); - final String tainted = - tainted(context, "Yep, tainted", new Range(3, 5, source(), Range.NOT_MARKED)); + final String tainted = tainted(context, "Yep, tainted", new Range(3, 5, source(), NOT_MARKED)); return new StringConcatFactoryBenchmark.Context(context, notTainted, tainted); } diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringJoinBenchmark.java 
b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringJoinBenchmark.java index 5b7b4d87a69..3f8bb4b6cfd 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringJoinBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringJoinBenchmark.java @@ -1,6 +1,6 @@ package com.datadog.iast.propagation; -import static com.datadog.iast.model.Range.NOT_MARKED; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; diff --git a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringSubsequenceBenchmark.java b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringSubsequenceBenchmark.java index 78e8f030be8..c8ba76a2add 100644 --- a/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringSubsequenceBenchmark.java +++ b/dd-java-agent/agent-iast/src/jmh/java/com/datadog/iast/propagation/StringSubsequenceBenchmark.java @@ -1,6 +1,6 @@ package com.datadog.iast.propagation; -import static com.datadog.iast.model.Range.NOT_MARKED; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Dependencies.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Dependencies.java new file mode 100644 index 00000000000..e4df7602205 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Dependencies.java @@ -0,0 +1,41 @@ +package com.datadog.iast; + +import com.datadog.iast.overhead.OverheadController; +import datadog.trace.api.Config; +import datadog.trace.util.stacktrace.StackWalker; +import javax.annotation.Nonnull; + +public class Dependencies { + + private final Config config; + private final Reporter reporter; + private final OverheadController overheadController; + private final StackWalker stackWalker; + + public Dependencies( + @Nonnull final Config config, + @Nonnull final Reporter reporter, + @Nonnull final OverheadController overheadController, + @Nonnull final StackWalker stackWalker) { + this.config = config; + this.reporter = reporter; + this.overheadController = overheadController; + this.stackWalker = stackWalker; + } + + public Config getConfig() { + return config; + } + + public Reporter getReporter() { + return reporter; + } + + public OverheadController getOverheadController() { + return overheadController; + } + + public StackWalker getStackWalker() { + return stackWalker; + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/GrpcRequestMessageHandler.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/GrpcRequestMessageHandler.java new file mode 100644 index 00000000000..8c167fbed7b --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/GrpcRequestMessageHandler.java @@ -0,0 +1,43 @@ +package com.datadog.iast; + +import datadog.trace.api.gateway.Flow; +import datadog.trace.api.gateway.RequestContext; +import datadog.trace.api.iast.IastContext; +import datadog.trace.api.iast.InstrumentationBridge; +import datadog.trace.api.iast.SourceTypes; +import datadog.trace.api.iast.propagation.PropagationModule; +import java.util.function.BiFunction; +import javax.annotation.Nonnull; + +public class GrpcRequestMessageHandler implements BiFunction> { + + /** + * This will cover: + * + *
<ul>
+ *   <li>com.google.protobuf.GeneratedMessage
+ *   <li>com.google.protobuf.GeneratedMessageV3
+ *   <li>com.google.protobuf.GeneratedMessageLite
+ * </ul>
+ */ + private static final String GENERATED_MESSAGE = "com.google.protobuf.GeneratedMessage"; + + /** Maps map to this class that does not implement Map interface */ + private static final String MAP_FIELD = "com.google.protobuf.MapField"; + + @Override + public Flow apply(final RequestContext ctx, final Object o) { + final PropagationModule module = InstrumentationBridge.PROPAGATION; + if (module != null && o != null) { + final IastContext iastCtx = IastContext.Provider.get(ctx); + module.taintDeeply( + iastCtx, o, SourceTypes.GRPC_BODY, GrpcRequestMessageHandler::isProtobufArtifact); + } + return Flow.ResultFlow.empty(); + } + + static boolean isProtobufArtifact(@Nonnull final Class kls) { + return kls.getSuperclass().getName().startsWith(GENERATED_MESSAGE) + || MAP_FIELD.equals(kls.getName()); + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/HasDependencies.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/HasDependencies.java deleted file mode 100644 index 9e1d7cf758f..00000000000 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/HasDependencies.java +++ /dev/null @@ -1,45 +0,0 @@ -package com.datadog.iast; - -import com.datadog.iast.overhead.OverheadController; -import datadog.trace.api.Config; -import datadog.trace.util.stacktrace.StackWalker; -import javax.annotation.Nonnull; - -public interface HasDependencies { - - void registerDependencies(@Nonnull Dependencies dependencies); - - class Dependencies { - private final Config config; - private final Reporter reporter; - private final OverheadController overheadController; - private final StackWalker stackWalker; - - public Dependencies( - @Nonnull final Config config, - @Nonnull final Reporter reporter, - @Nonnull final OverheadController overheadController, - @Nonnull final StackWalker stackWalker) { - this.config = config; - this.reporter = reporter; - this.overheadController = overheadController; - this.stackWalker = stackWalker; - } - - public Config getConfig() { - return config; - } - - public Reporter getReporter() { - return reporter; - } - - public OverheadController getOverheadController() { - return overheadController; - } - - public StackWalker getStackWalker() { - return stackWalker; - } - } -} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastRequestContext.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastRequestContext.java index d6d79978118..15817bde263 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastRequestContext.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastRequestContext.java @@ -4,21 +4,25 @@ import com.datadog.iast.overhead.OverheadContext; import com.datadog.iast.taint.TaintedObjects; import datadog.trace.api.gateway.RequestContext; -import datadog.trace.api.gateway.RequestContextSlot; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.telemetry.IastMetricCollector; import datadog.trace.api.iast.telemetry.IastMetricCollector.HasMetricCollector; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.concurrent.atomic.AtomicBoolean; +import javax.annotation.Nonnull; import javax.annotation.Nullable; -public class IastRequestContext implements HasMetricCollector { +public class IastRequestContext implements IastContext, HasMetricCollector { private final VulnerabilityBatch vulnerabilityBatch; private final AtomicBoolean spanDataIsSet; private final 
TaintedObjects taintedObjects; private final OverheadContext overheadContext; - private final IastMetricCollector collector; + @Nullable private final IastMetricCollector collector; + @Nullable private volatile String strictTransportSecurity; + @Nullable private volatile String xContentTypeOptions; + @Nullable private volatile String xForwardedProto; + @Nullable private volatile String contentType; public IastRequestContext() { this(TaintedObjects.acquire(), null); @@ -29,7 +33,7 @@ public IastRequestContext(final TaintedObjects taintedObjects) { } public IastRequestContext( - final TaintedObjects taintedObjects, final IastMetricCollector collector) { + final TaintedObjects taintedObjects, @Nullable final IastMetricCollector collector) { this.vulnerabilityBatch = new VulnerabilityBatch(); this.spanDataIsSet = new AtomicBoolean(false); this.overheadContext = new OverheadContext(); @@ -41,6 +45,42 @@ public VulnerabilityBatch getVulnerabilityBatch() { return vulnerabilityBatch; } + @Nullable + public String getStrictTransportSecurity() { + return strictTransportSecurity; + } + + public void setStrictTransportSecurity(final String strictTransportSecurity) { + this.strictTransportSecurity = strictTransportSecurity; + } + + @Nullable + public String getxContentTypeOptions() { + return xContentTypeOptions; + } + + public void setxContentTypeOptions(final String xContentTypeOptions) { + this.xContentTypeOptions = xContentTypeOptions; + } + + @Nullable + public String getxForwardedProto() { + return xForwardedProto; + } + + public void setxForwardedProto(final String xForwardedProto) { + this.xForwardedProto = xForwardedProto; + } + + @Nullable + public String getContentType() { + return contentType; + } + + public void setContentType(final String contentType) { + this.contentType = contentType; + } + public boolean getAndSetSpanDataIsSet() { return spanDataIsSet.getAndSet(true); } @@ -49,6 +89,7 @@ public OverheadContext getOverheadContext() { return overheadContext; } + @Nonnull public TaintedObjects getTaintedObjects() { return taintedObjects; } @@ -61,22 +102,21 @@ public IastMetricCollector getMetricCollector() { @Nullable public static IastRequestContext get() { - return get(AgentTracer.activeSpan()); + return asRequestContext(IastContext.Provider.get()); } @Nullable public static IastRequestContext get(final AgentSpan span) { - if (span == null) { - return null; - } - return get(span.getRequestContext()); + return asRequestContext(IastContext.Provider.get(span)); } @Nullable public static IastRequestContext get(final RequestContext reqCtx) { - if (reqCtx == null) { - return null; - } - return reqCtx.getData(RequestContextSlot.IAST); + return asRequestContext(IastContext.Provider.get(reqCtx)); + } + + @Nullable + private static IastRequestContext asRequestContext(final IastContext ctx) { + return ctx instanceof IastRequestContext ? 
(IastRequestContext) ctx : null; } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastSystem.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastSystem.java index df7c8b5cd58..589b2dfc80e 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastSystem.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/IastSystem.java @@ -1,11 +1,11 @@ package com.datadog.iast; -import com.datadog.iast.HasDependencies.Dependencies; import com.datadog.iast.overhead.OverheadController; import com.datadog.iast.propagation.FastCodecModule; import com.datadog.iast.propagation.PropagationModuleImpl; import com.datadog.iast.propagation.StringModuleImpl; import com.datadog.iast.sink.CommandInjectionModuleImpl; +import com.datadog.iast.sink.HstsMissingHeaderModuleImpl; import com.datadog.iast.sink.HttpResponseHeaderModuleImpl; import com.datadog.iast.sink.InsecureCookieModuleImpl; import com.datadog.iast.sink.LdapInjectionModuleImpl; @@ -19,13 +19,14 @@ import com.datadog.iast.sink.WeakCipherModuleImpl; import com.datadog.iast.sink.WeakHashModuleImpl; import com.datadog.iast.sink.WeakRandomnessModuleImpl; +import com.datadog.iast.sink.XContentTypeModuleImpl; import com.datadog.iast.sink.XPathInjectionModuleImpl; import com.datadog.iast.sink.XssModuleImpl; -import com.datadog.iast.source.WebModuleImpl; import com.datadog.iast.telemetry.TelemetryRequestEndedHandler; import com.datadog.iast.telemetry.TelemetryRequestStartedHandler; import datadog.trace.api.Config; import datadog.trace.api.ProductActivation; +import datadog.trace.api.function.TriConsumer; import datadog.trace.api.gateway.EventType; import datadog.trace.api.gateway.Events; import datadog.trace.api.gateway.Flow; @@ -38,9 +39,9 @@ import datadog.trace.util.AgentTaskScheduler; import datadog.trace.util.stacktrace.StackWalkerFactory; import java.util.function.BiFunction; -import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Stream; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,7 +54,8 @@ public static void start(final SubscriptionService ss) { start(ss, null); } - public static void start(final SubscriptionService ss, OverheadController overheadController) { + public static void start( + final SubscriptionService ss, @Nullable OverheadController overheadController) { final Config config = Config.get(); if (config.getIastActivation() != ProductActivation.FULLY_ENABLED) { LOGGER.debug("IAST is disabled"); @@ -68,43 +70,37 @@ public static void start(final SubscriptionService ss, OverheadController overhe final Dependencies dependencies = new Dependencies(config, reporter, overheadController, StackWalkerFactory.INSTANCE); final boolean addTelemetry = config.getIastTelemetryVerbosity() != Verbosity.OFF; - iastModules().forEach(registerModule(dependencies)); + iastModules(dependencies).forEach(InstrumentationBridge::registerIastModule); registerRequestStartedCallback(ss, addTelemetry, dependencies); registerRequestEndedCallback(ss, addTelemetry, dependencies); + registerHeadersCallback(ss); + registerGrpcServerRequestMessageCallback(ss); LOGGER.debug("IAST started"); } - private static Consumer registerModule(final Dependencies dependencies) { - return module -> { - if (module instanceof HasDependencies) { - ((HasDependencies) module).registerDependencies(dependencies); - } - InstrumentationBridge.registerIastModule(module); - }; - } - - private static Stream iastModules() { + private static Stream 
iastModules(final Dependencies dependencies) { return Stream.of( - new WebModuleImpl(), new StringModuleImpl(), new FastCodecModule(), - new SqlInjectionModuleImpl(), - new PathTraversalModuleImpl(), - new CommandInjectionModuleImpl(), - new WeakCipherModuleImpl(), - new WeakHashModuleImpl(), - new LdapInjectionModuleImpl(), + new SqlInjectionModuleImpl(dependencies), + new PathTraversalModuleImpl(dependencies), + new CommandInjectionModuleImpl(dependencies), + new WeakCipherModuleImpl(dependencies), + new WeakHashModuleImpl(dependencies), + new LdapInjectionModuleImpl(dependencies), new PropagationModuleImpl(), - new HttpResponseHeaderModuleImpl(), + new HttpResponseHeaderModuleImpl(dependencies), + new HstsMissingHeaderModuleImpl(dependencies), new InsecureCookieModuleImpl(), new NoHttpOnlyCookieModuleImpl(), + new XContentTypeModuleImpl(dependencies), new NoSameSiteCookieModuleImpl(), - new SsrfModuleImpl(), - new UnvalidatedRedirectModuleImpl(), - new WeakRandomnessModuleImpl(), - new XPathInjectionModuleImpl(), - new TrustBoundaryViolationModuleImpl(), - new XssModuleImpl()); + new SsrfModuleImpl(dependencies), + new UnvalidatedRedirectModuleImpl(dependencies), + new WeakRandomnessModuleImpl(dependencies), + new XPathInjectionModuleImpl(dependencies), + new TrustBoundaryViolationModuleImpl(dependencies), + new XssModuleImpl(dependencies)); } private static void registerRequestStartedCallback( @@ -124,4 +120,15 @@ private static void registerRequestEndedCallback( final RequestEndedHandler handler = new RequestEndedHandler(dependencies); ss.registerCallback(event, addTelemetry ? new TelemetryRequestEndedHandler(handler) : handler); } + + private static void registerHeadersCallback(final SubscriptionService ss) { + final EventType> event = + Events.get().requestHeader(); + final TriConsumer handler = new RequestHeaderHandler(); + ss.registerCallback(event, handler); + } + + private static void registerGrpcServerRequestMessageCallback(final SubscriptionService ss) { + ss.registerCallback(Events.get().grpcServerRequestMessage(), new GrpcRequestMessageHandler()); + } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Reporter.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Reporter.java index df2a732e2a0..bb4f3436a27 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Reporter.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/Reporter.java @@ -4,6 +4,7 @@ import com.datadog.iast.model.Vulnerability; import com.datadog.iast.model.VulnerabilityBatch; +import com.datadog.iast.taint.TaintedObjects; import datadog.trace.api.Config; import datadog.trace.api.DDTags; import datadog.trace.api.gateway.RequestContext; @@ -29,7 +30,7 @@ public class Reporter { this(Config.get(), null); } - public Reporter(final Config config, final AgentTaskScheduler taskScheduler) { + public Reporter(final Config config, @Nullable final AgentTaskScheduler taskScheduler) { this( config.isIastDeduplicationEnabled() ? 
new HashBasedDeduplication(taskScheduler) @@ -41,10 +42,13 @@ public Reporter(final Config config, final AgentTaskScheduler taskScheduler) { } public void report(@Nullable final AgentSpan span, @Nonnull final Vulnerability vulnerability) { + if (duplicated.test(vulnerability)) { + return; + } if (span == null) { final AgentSpan newSpan = startNewSpan(); try (final AgentScope autoClosed = tracer().activateSpan(newSpan, ScopeSource.MANUAL)) { - vulnerability.getLocation().updateSpan(newSpan.getSpanId()); + vulnerability.updateSpan(newSpan); reportVulnerability(newSpan, vulnerability); } finally { newSpan.finish(); @@ -64,9 +68,6 @@ private void reportVulnerability( if (ctx == null) { return; } - if (duplicated.test(vulnerability)) { - return; - } final VulnerabilityBatch batch = ctx.getVulnerabilityBatch(); batch.add(vulnerability); if (!ctx.getAndSetSpanDataIsSet()) { @@ -81,7 +82,8 @@ private void reportVulnerability( private AgentSpan startNewSpan() { final AgentSpan.Context tagContext = - new TagContext().withRequestContextDataIast(new IastRequestContext()); + new TagContext() + .withRequestContextDataIast(new IastRequestContext(TaintedObjects.NoOp.INSTANCE)); final AgentSpan span = tracer() .startSpan("iast", VULNERABILITY_SPAN_NAME, tagContext) @@ -106,11 +108,11 @@ protected static class HashBasedDeduplication implements Predicate hashes; - public HashBasedDeduplication(final AgentTaskScheduler taskScheduler) { + public HashBasedDeduplication(@Nullable final AgentTaskScheduler taskScheduler) { this(DEFAULT_MAX_SIZE, taskScheduler); } - HashBasedDeduplication(final int size, final AgentTaskScheduler taskScheduler) { + HashBasedDeduplication(final int size, @Nullable final AgentTaskScheduler taskScheduler) { maxSize = size; hashes = ConcurrentHashMap.newKeySet(size); if (taskScheduler != null) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestEndedHandler.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestEndedHandler.java index 1dcadd078d5..fc2c9a18975 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestEndedHandler.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestEndedHandler.java @@ -3,12 +3,13 @@ import static com.datadog.iast.IastTag.ANALYZED; import static com.datadog.iast.IastTag.SKIPPED; -import com.datadog.iast.HasDependencies.Dependencies; import com.datadog.iast.overhead.OverheadController; import com.datadog.iast.taint.TaintedObjects; import datadog.trace.api.gateway.Flow; import datadog.trace.api.gateway.IGSpanInfo; import datadog.trace.api.gateway.RequestContext; +import datadog.trace.api.iast.InstrumentationBridge; +import datadog.trace.api.iast.sink.HttpRequestEndModule; import datadog.trace.api.internal.TraceSegment; import java.util.function.BiFunction; import javax.annotation.Nonnull; @@ -26,6 +27,11 @@ public Flow apply(final RequestContext requestContext, final IGSpanInfo ig final TraceSegment traceSegment = requestContext.getTraceSegment(); final IastRequestContext iastRequestContext = IastRequestContext.get(requestContext); if (iastRequestContext != null) { + for (HttpRequestEndModule module : requestEndModules()) { + if (module != null) { + module.onRequestEnd(iastRequestContext, igSpanInfo); + } + } try { ANALYZED.setTagTop(traceSegment); final TaintedObjects taintedObjects = iastRequestContext.getTaintedObjects(); @@ -40,4 +46,11 @@ public Flow apply(final RequestContext requestContext, final IGSpanInfo ig } return Flow.ResultFlow.empty(); } + + private 
HttpRequestEndModule[] requestEndModules() { + return new HttpRequestEndModule[] { + InstrumentationBridge.HSTS_MISSING_HEADER_MODULE, + InstrumentationBridge.X_CONTENT_TYPE_HEADER_MODULE + }; + } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestHeaderHandler.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestHeaderHandler.java new file mode 100644 index 00000000000..4effb6408c5 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestHeaderHandler.java @@ -0,0 +1,21 @@ +package com.datadog.iast; + +import com.datadog.iast.util.HttpHeader; +import com.datadog.iast.util.HttpHeader.ContextAwareHeader; +import datadog.trace.api.function.TriConsumer; +import datadog.trace.api.gateway.RequestContext; +import datadog.trace.api.gateway.RequestContextSlot; + +public class RequestHeaderHandler implements TriConsumer { + + @Override + public void accept(RequestContext requestContext, String key, String value) { + final IastRequestContext ctx = requestContext.getData(RequestContextSlot.IAST); + if (null != ctx && key != null) { + final HttpHeader header = HttpHeader.from(key); + if (header instanceof ContextAwareHeader) { + ((ContextAwareHeader) header).onHeader(ctx, value); + } + } + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestStartedHandler.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestStartedHandler.java index 9c680717743..4d5c851f52c 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestStartedHandler.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/RequestStartedHandler.java @@ -1,6 +1,5 @@ package com.datadog.iast; -import com.datadog.iast.HasDependencies.Dependencies; import com.datadog.iast.overhead.OverheadController; import datadog.trace.api.gateway.Flow; import java.util.function.Supplier; diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Evidence.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Evidence.java index 65b2f78e374..c4dc17d27e5 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Evidence.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Evidence.java @@ -17,6 +17,8 @@ public final class Evidence { private final transient @Nonnull Context context = new Evidence.Context(4); /** For deserialization in tests via moshi */ + @Deprecated + @SuppressWarnings({"NullAway", "DataFlowIssue", "unused"}) private Evidence() { this(null, null); } @@ -25,15 +27,17 @@ public Evidence(final String value) { this(value, null); } - public Evidence(final String value, final Range[] ranges) { + public Evidence(@Nonnull final String value, @Nullable final Range[] ranges) { this.value = value; this.ranges = ranges; } + @Nonnull public String getValue() { return value; } + @Nullable public Range[] getRanges() { return ranges; } @@ -76,6 +80,7 @@ public boolean put(final String key, final Object value) { return true; } + @Nullable @SuppressWarnings("unchecked") public E get(@Nonnull final String key) { return (E) context.get(key); diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Location.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Location.java index a6919aed2c8..1dbe22b545d 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Location.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Location.java @@ -1,35 +1,62 @@ package com.datadog.iast.model; 
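// The new RequestHeaderHandler above receives every (request context, header name, header value)
// triple from the instrumentation gateway, resolves the name via HttpHeader.from(key) and lets
// headers that implement ContextAwareHeader update the IAST request context. HttpHeader itself is
// not part of this patch, so what follows is only a minimal, self-contained sketch of that
// lookup-and-dispatch pattern; every name in it (HeaderDispatchSketch, ToyContext, KnownHeader,
// dispatch) is hypothetical and uses only the JDK.
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;

final class HeaderDispatchSketch {

  // Stand-in for the per-request IAST context that header handlers mutate.
  static final class ToyContext {
    final Map<String, String> seen = new ConcurrentHashMap<>();
  }

  // Headers the agent cares about; each knows how to record itself on the context.
  enum KnownHeader {
    STRICT_TRANSPORT_SECURITY(
        "strict-transport-security", (ctx, value) -> ctx.seen.put("hsts", value)),
    X_CONTENT_TYPE_OPTIONS(
        "x-content-type-options", (ctx, value) -> ctx.seen.put("xContentTypeOptions", value));

    final String lowerCaseName;
    final BiConsumer<ToyContext, String> onHeader;

    KnownHeader(final String lowerCaseName, final BiConsumer<ToyContext, String> onHeader) {
      this.lowerCaseName = lowerCaseName;
      this.onHeader = onHeader;
    }

    // Case-insensitive lookup, mirroring HttpHeader.from(key) in spirit.
    static KnownHeader from(final String name) {
      final String lower = name.toLowerCase(Locale.ROOT);
      for (final KnownHeader header : values()) {
        if (header.lowerCaseName.equals(lower)) {
          return header;
        }
      }
      return null;
    }
  }

  // Equivalent of RequestHeaderHandler.accept: ignore unknown headers, dispatch known ones.
  static void dispatch(final ToyContext ctx, final String key, final String value) {
    if (ctx == null || key == null) {
      return;
    }
    final KnownHeader header = KnownHeader.from(key);
    if (header != null) {
      header.onHeader.accept(ctx, value);
    }
  }

  public static void main(final String[] args) {
    final ToyContext ctx = new ToyContext();
    dispatch(ctx, "Strict-Transport-Security", "max-age=31536000");
    dispatch(ctx, "X-Unrelated-Header", "ignored");
    System.out.println(ctx.seen); // prints {hsts=max-age=31536000}
  }
}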
+import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import javax.annotation.Nullable; + public final class Location { - private final String path; + @Nullable private final String path; private final int line; - private final String method; + @Nullable private final String method; + + @Nullable private Long spanId; - private Long spanId; + @Nullable private transient String serviceName; - private Location(final long spanId, final String path, final int line, final String method) { - this.spanId = spanId == 0 ? null : spanId; + private Location( + @Nullable final Long spanId, + @Nullable final String path, + final int line, + @Nullable final String method, + @Nullable final String serviceName) { + this.spanId = spanId; this.path = path; this.line = line; this.method = method; + this.serviceName = serviceName; } - public static Location forSpanAndStack(final long spanId, final StackTraceElement stack) { - return new Location(spanId, stack.getClassName(), stack.getLineNumber(), stack.getMethodName()); + public static Location forSpanAndStack( + @Nullable final AgentSpan span, final StackTraceElement stack) { + return new Location( + spanId(span), + stack.getClassName(), + stack.getLineNumber(), + stack.getMethodName(), + serviceName(span)); } public static Location forSpanAndClassAndMethod( - final long spanId, final String clazz, final String method) { - return new Location(spanId, clazz, -1, method); + final AgentSpan span, final String clazz, final String method) { + return new Location(spanId(span), clazz, -1, method, serviceName(span)); + } + + public static Location forSpanAndFileAndLine( + final AgentSpan span, final String file, final int line) { + return new Location(spanId(span), file, line, null, serviceName(span)); + } + + public static Location forSpan(final AgentSpan span) { + return new Location(spanId(span), null, -1, null, serviceName(span)); } public long getSpanId() { return spanId == null ? 0 : spanId; } + @Nullable public String getPath() { return path; } @@ -38,11 +65,30 @@ public int getLine() { return line; } + @Nullable public String getMethod() { return method; } - public void updateSpan(final long spanId) { - this.spanId = spanId; + @Nullable + public String getServiceName() { + return serviceName; + } + + public void updateSpan(@Nullable final AgentSpan span) { + if (span != null) { + this.spanId = span.getSpanId(); + this.serviceName = span.getServiceName(); + } + } + + @Nullable + private static Long spanId(@Nullable AgentSpan span) { + return span != null ? span.getSpanId() : null; + } + + @Nullable + private static String serviceName(@Nullable AgentSpan span) { + return span != null ? 
span.getServiceName() : null; } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Range.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Range.java index 97caac40bd3..a8e1d43b28a 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Range.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Range.java @@ -1,5 +1,7 @@ package com.datadog.iast.model; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; + import com.datadog.iast.model.json.SourceIndex; import com.datadog.iast.util.Ranged; import java.util.Objects; @@ -9,8 +11,6 @@ public final class Range implements Ranged { - public static final int NOT_MARKED = 0; - private final @Nonnegative int start; private final @Nonnegative int length; private final @Nonnull @SourceIndex Source source; diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Source.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Source.java index 6dc5956adc7..31acd483a7a 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Source.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Source.java @@ -5,13 +5,19 @@ import datadog.trace.api.iast.Taintable; import java.util.Objects; import java.util.StringJoiner; +import javax.annotation.Nullable; public final class Source implements Taintable.Source { private final @SourceTypeString byte origin; - private final String name; - private final String value; + @Nullable private final String name; + @Nullable private final String value; - public Source(final byte origin, final String name, final String value) { + public Source( + final byte origin, @Nullable final CharSequence name, @Nullable final CharSequence value) { + this(origin, name == null ? null : name.toString(), value == null ? 
null : value.toString()); + } + + public Source(final byte origin, @Nullable final String name, @Nullable final String value) { this.origin = origin; this.name = name; this.value = value; @@ -23,11 +29,13 @@ public byte getOrigin() { } @Override + @Nullable public String getName() { return name; } @Override + @Nullable public String getValue() { return value; } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Vulnerability.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Vulnerability.java index 26db8a7d7e7..97ecf815775 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Vulnerability.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/Vulnerability.java @@ -1,5 +1,6 @@ package com.datadog.iast.model; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -11,24 +12,29 @@ public final class Vulnerability { private final @Nullable Evidence evidence; - private final long hash; + private long hash; public Vulnerability( - final VulnerabilityType type, final Location location, final Evidence evidence) { + @Nonnull final VulnerabilityType type, + @Nonnull final Location location, + @Nullable final Evidence evidence) { this.type = type; this.location = location; this.evidence = evidence; this.hash = type.calculateHash(this); } + @Nonnull public VulnerabilityType getType() { return type; } + @Nonnull public Location getLocation() { return location; } + @Nullable public Evidence getEvidence() { return evidence; } @@ -37,6 +43,15 @@ public long getHash() { return hash; } + public void updateSpan(final AgentSpan newSpan) { + if (newSpan != null) { + location.updateSpan(newSpan); + if (type instanceof VulnerabilityType.HeaderVulnerabilityType) { + hash = type.calculateHash(this); + } + } + } + @Override public boolean equals(Object o) { if (this == o) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityBatch.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityBatch.java index 17f59fbdd7c..fa5193b0a00 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityBatch.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityBatch.java @@ -3,12 +3,13 @@ import com.datadog.iast.model.json.VulnerabilityEncoding; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; /** Collects vulnerabilities and serializes to JSON lazily on {@link #toString()}. */ public final class VulnerabilityBatch { - private List vulnerabilities; - private volatile String json; + @Nullable private List vulnerabilities; + @Nullable private volatile String json; public void add(final Vulnerability v) { synchronized (this) { @@ -21,6 +22,7 @@ public void add(final Vulnerability v) { } /** Internal list of vulnerabilities. Not thread-safe. 
*/ + @Nullable public List getVulnerabilities() { return vulnerabilities; } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityType.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityType.java index 6b204720c96..ae1d25ddfb3 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityType.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/VulnerabilityType.java @@ -1,60 +1,69 @@ package com.datadog.iast.model; -import static com.datadog.iast.model.Range.NOT_MARKED; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import datadog.trace.api.iast.VulnerabilityMarks; import datadog.trace.api.iast.VulnerabilityTypes; import java.io.File; +import java.nio.charset.StandardCharsets; import java.util.zip.CRC32; import javax.annotation.Nonnull; public interface VulnerabilityType { VulnerabilityType WEAK_CIPHER = - new VulnerabilityTypeImpl(VulnerabilityTypes.WEAK_CIPHER, NOT_MARKED); - VulnerabilityType WEAK_HASH = new VulnerabilityTypeImpl(VulnerabilityTypes.WEAK_HASH, NOT_MARKED); + new VulnerabilityTypeImpl(VulnerabilityTypes.WEAK_CIPHER_STRING, NOT_MARKED); + VulnerabilityType WEAK_HASH = + new VulnerabilityTypeImpl(VulnerabilityTypes.WEAK_HASH_STRING, NOT_MARKED); VulnerabilityType INSECURE_COOKIE = - new VulnerabilityTypeImpl(VulnerabilityTypes.INSECURE_COOKIE, NOT_MARKED); + new CookieVulnerabilityType(VulnerabilityTypes.INSECURE_COOKIE_STRING, NOT_MARKED); VulnerabilityType NO_HTTPONLY_COOKIE = - new VulnerabilityTypeImpl(VulnerabilityTypes.NO_HTTPONLY_COOKIE, NOT_MARKED); + new CookieVulnerabilityType(VulnerabilityTypes.NO_HTTPONLY_COOKIE_STRING, NOT_MARKED); + VulnerabilityType HSTS_HEADER_MISSING = + new HeaderVulnerabilityType(VulnerabilityTypes.HSTS_HEADER_MISSING_STRING, NOT_MARKED); + VulnerabilityType XCONTENTTYPE_HEADER_MISSING = + new HeaderVulnerabilityType( + VulnerabilityTypes.XCONTENTTYPE_HEADER_MISSING_STRING, NOT_MARKED); VulnerabilityType NO_SAMESITE_COOKIE = - new VulnerabilityTypeImpl(VulnerabilityTypes.NO_SAMESITE_COOKIE, NOT_MARKED); + new CookieVulnerabilityType(VulnerabilityTypes.NO_SAMESITE_COOKIE_STRING, NOT_MARKED); InjectionType SQL_INJECTION = new InjectionTypeImpl( - VulnerabilityTypes.SQL_INJECTION, VulnerabilityMarks.SQL_INJECTION_MARK, ' '); + VulnerabilityTypes.SQL_INJECTION_STRING, VulnerabilityMarks.SQL_INJECTION_MARK, ' '); InjectionType COMMAND_INJECTION = new InjectionTypeImpl( - VulnerabilityTypes.COMMAND_INJECTION, VulnerabilityMarks.COMMAND_INJECTION_MARK, ' '); + VulnerabilityTypes.COMMAND_INJECTION_STRING, + VulnerabilityMarks.COMMAND_INJECTION_MARK, + ' '); InjectionType PATH_TRAVERSAL = new InjectionTypeImpl( - VulnerabilityTypes.PATH_TRAVERSAL, + VulnerabilityTypes.PATH_TRAVERSAL_STRING, VulnerabilityMarks.PATH_TRAVERSAL_MARK, File.separatorChar); InjectionType LDAP_INJECTION = new InjectionTypeImpl( - VulnerabilityTypes.LDAP_INJECTION, VulnerabilityMarks.LDAP_INJECTION_MARK, ' '); + VulnerabilityTypes.LDAP_INJECTION_STRING, VulnerabilityMarks.LDAP_INJECTION_MARK, ' '); InjectionType SSRF = - new InjectionTypeImpl(VulnerabilityTypes.SSRF, VulnerabilityMarks.SSRF_MARK, ' '); + new InjectionTypeImpl(VulnerabilityTypes.SSRF_STRING, VulnerabilityMarks.SSRF_MARK, ' '); InjectionType UNVALIDATED_REDIRECT = new InjectionTypeImpl( - VulnerabilityTypes.UNVALIDATED_REDIRECT, + VulnerabilityTypes.UNVALIDATED_REDIRECT_STRING, VulnerabilityMarks.UNVALIDATED_REDIRECT_MARK, ' '); VulnerabilityType WEAK_RANDOMNESS = - new 
VulnerabilityTypeImpl(VulnerabilityTypes.WEAK_RANDOMNESS, NOT_MARKED); + new VulnerabilityTypeImpl(VulnerabilityTypes.WEAK_RANDOMNESS_STRING, NOT_MARKED); InjectionType XPATH_INJECTION = new InjectionTypeImpl( - VulnerabilityTypes.XPATH_INJECTION, VulnerabilityMarks.XPATH_INJECTION_MARK, ' '); + VulnerabilityTypes.XPATH_INJECTION_STRING, VulnerabilityMarks.XPATH_INJECTION_MARK, ' '); InjectionType TRUST_BOUNDARY_VIOLATION = new InjectionTypeImpl( - VulnerabilityTypes.TRUST_BOUNDARY_VIOLATION, + VulnerabilityTypes.TRUST_BOUNDARY_VIOLATION_STRING, VulnerabilityMarks.TRUST_BOUNDARY_VIOLATION, ' '); InjectionType XSS = - new InjectionTypeImpl(VulnerabilityTypes.XSS, VulnerabilityMarks.XSS_MARK, ' '); + new InjectionTypeImpl(VulnerabilityTypes.XSS_STRING, VulnerabilityMarks.XSS_MARK, ' '); String name(); @@ -91,17 +100,24 @@ public int mark() { @Override public long calculateHash(@Nonnull final Vulnerability vulnerability) { CRC32 crc = new CRC32(); - crc.update(name().getBytes()); + update(crc, name()); final Location location = vulnerability.getLocation(); if (location != null) { crc.update(location.getLine()); - crc.update(location.getPath().getBytes()); + if (location.getPath() != null) { + update(crc, location.getPath()); + } if (location.getLine() <= -1 && location.getMethod() != null) { - crc.update(location.getMethod().getBytes()); + update(crc, location.getMethod()); } } return crc.getValue(); } + + protected void update(final CRC32 crc, final String value) { + final byte[] bytes = value.getBytes(StandardCharsets.UTF_8); + crc.update(bytes, 0, bytes.length); + } } class InjectionTypeImpl extends VulnerabilityTypeImpl implements InjectionType { @@ -118,4 +134,38 @@ public char evidenceSeparator() { return evidenceSeparator; } } + + class HeaderVulnerabilityType extends VulnerabilityTypeImpl { + public HeaderVulnerabilityType(@Nonnull String name, int vulnerabilityMark) { + super(name, vulnerabilityMark); + } + + @Override + public long calculateHash(@Nonnull final Vulnerability vulnerability) { + CRC32 crc = new CRC32(); + update(crc, name()); + String serviceName = vulnerability.getLocation().getServiceName(); + if (serviceName != null) { + update(crc, serviceName); + } + return crc.getValue(); + } + } + + class CookieVulnerabilityType extends VulnerabilityTypeImpl { + public CookieVulnerabilityType(@Nonnull String name, int vulnerabilityMark) { + super(name, vulnerabilityMark); + } + + @Override + public long calculateHash(@Nonnull final Vulnerability vulnerability) { + CRC32 crc = new CRC32(); + update(crc, name()); + final Evidence evidence = vulnerability.getEvidence(); + if (evidence != null) { + update(crc, evidence.getValue()); + } + return crc.getValue(); + } + } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/AdapterFactory.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/AdapterFactory.java index e94b8e5ce60..b62e74388ae 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/AdapterFactory.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/AdapterFactory.java @@ -37,7 +37,7 @@ static class Context { final List sources; final Map sourceIndexMap; final Map sourceContext; - Vulnerability vulnerability; + @Nullable Vulnerability vulnerability; public Context() { sources = new ArrayList<>(); @@ -73,7 +73,7 @@ public JsonAdapter create( if (hasSourceIndexAnnotation(annotations)) { return new SourceIndexAdapter(); } else { - return new SourceAdapter(this, moshi); + return 
new SourceAdapter(); } } else if (VulnerabilityBatch.class.equals(rawType)) { return new VulnerabilityBatchAdapter(moshi); @@ -83,6 +83,8 @@ public JsonAdapter create( return new EvidenceAdapter(moshi); } else if (VulnerabilityType.class.equals(rawType)) { return new VulnerabilityTypeAdapter(); + } else if (TruncatedVulnerabilities.class.equals(rawType)) { + return new TruncatedVulnerabilitiesAdapter(moshi); } return null; } @@ -181,7 +183,7 @@ public static class RedactionContext { private final Source source; private final boolean sensitive; private boolean sensitiveRanges; - private String redactedValue; + @Nullable private String redactedValue; public RedactionContext(final Source source) { this.source = source; @@ -204,6 +206,7 @@ public boolean shouldRedact() { return sensitive || sensitiveRanges; } + @Nullable public String getRedactedValue() { return redactedValue; } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/EvidenceAdapter.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/EvidenceAdapter.java index a14e47db13d..9c78f11ed81 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/EvidenceAdapter.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/EvidenceAdapter.java @@ -1,5 +1,7 @@ package com.datadog.iast.model.json; +import static com.datadog.iast.model.json.TruncationUtils.writeTruncableValue; + import com.datadog.iast.model.Evidence; import com.datadog.iast.model.Range; import com.datadog.iast.model.Source; @@ -29,9 +31,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class EvidenceAdapter extends FormattingAdapter { + private static final Logger log = LoggerFactory.getLogger(EvidenceAdapter.class); + private final JsonAdapter sourceAdapter; private final JsonAdapter defaultAdapter; private final JsonAdapter redactedAdapter; @@ -57,15 +63,23 @@ public void toJson(@Nonnull final JsonWriter writer, final @Nullable Evidence ev } private String substring(final String value, final Ranged range) { - final int end = Math.min(range.getStart() + range.getLength(), value.length()); + int end = Math.min(range.getStart() + range.getLength(), value.length()); + if (end < 0) { + log.debug("Invalid negative end parameter for substring. 
Value: {} Range: {}", value, range); + end = value.length(); + } return value.substring(range.getStart(), end); } private class DefaultEvidenceAdapter extends FormattingAdapter { @Override - public void toJson(@Nonnull final JsonWriter writer, final @Nonnull Evidence evidence) + public void toJson(@Nonnull final JsonWriter writer, final @Nullable Evidence evidence) throws IOException { + if (evidence == null) { + writer.nullValue(); + return; + } writer.beginObject(); if (evidence.getRanges() == null || evidence.getRanges().length == 0) { writer.name("value"); @@ -122,8 +136,12 @@ private void writeValuePart( private class RedactedEvidenceAdapter extends FormattingAdapter { @Override - public void toJson(@Nonnull final JsonWriter writer, @Nonnull final Evidence evidence) + public void toJson(@Nonnull final JsonWriter writer, @Nullable final Evidence evidence) throws IOException { + if (evidence == null) { + writer.nullValue(); + return; + } final Context ctx = Context.get(); final Vulnerability vulnerability = ctx.vulnerability; if (vulnerability == null) { @@ -153,7 +171,10 @@ private void toRedactedJson( writer.beginArray(); for (final Iterator it = new ValuePartIterator(ctx, value, tainted, sensitive); it.hasNext(); ) { - it.next().write(ctx, writer); + final ValuePart next = it.next(); + if (next != null) { + next.write(ctx, writer); + } } writer.endArray(); } @@ -195,6 +216,7 @@ public boolean hasNext() { return !next.isEmpty() || index < value.length(); } + @Nullable @Override public ValuePart next() { if (!hasNext()) { @@ -217,12 +239,17 @@ public ValuePart next() { } } if (nextSensitive != null) { - addNextStringValuePart(nextSensitive.getStart(), next); // pending string chunk - handleSensitiveValue(nextSensitive); + if (nextSensitive.isBefore(nextTainted)) { + addNextStringValuePart(nextSensitive.getStart(), next); // pending string chunk + handleSensitiveValue(nextSensitive); + } else { + sensitive.addFirst(nextSensitive); + } } return next.poll(); } + @Nullable private Ranged handleTaintedValue( @Nonnull final Range nextTainted, @Nullable Ranged nextSensitive) { final RedactionContext redactionCtx = ctx.getRedaction(nextTainted.getSource()); @@ -276,6 +303,7 @@ private void handleSensitiveValue(@Nonnull Ranged nextSensitive) { * Removes the tainted range from the sensitive one and returns whatever is before and enqueues * the rest */ + @Nullable private Ranged removeTaintedRange(final Ranged sensitive, final Range tainted) { final List disjointRanges = sensitive.remove(tainted); Ranged result = null; @@ -289,6 +317,7 @@ private Ranged removeTaintedRange(final Ranged sensitive, final Range tainted) { return result; } + @Nullable private ValuePart nextStringValuePart(final int end) { if (index < end) { final String chunk = value.substring(index, end); @@ -311,9 +340,10 @@ interface ValuePart { } static class StringValuePart implements ValuePart { - private final String value; - private StringValuePart(final String value) { + @Nullable private final String value; + + private StringValuePart(@Nullable final String value) { this.value = value; } @@ -324,7 +354,7 @@ public void write(final Context ctx, final JsonWriter writer) throws IOException } writer.beginObject(); writer.name("value"); - writer.value(value); + writeTruncableValue(writer, value); writer.endObject(); } } @@ -339,7 +369,7 @@ private RedactedValuePart(final String value) { @Override public void write(final Context ctx, final JsonWriter writer) throws IOException { - if (value == null || value.isEmpty()) { + if 
(value == null) { return; } writer.beginObject(); @@ -385,7 +415,7 @@ public void write(final Context ctx, final JsonWriter writer) throws IOException } else { writer.beginObject(); writer.name("value"); - writer.value(value); + writeTruncableValue(writer, value); writer.name("source"); adapter.toJson(writer, source); writer.endObject(); @@ -429,11 +459,13 @@ private void addValuePart( valueParts.add(new TaintedValuePart(adapter, source, chunk, false)); } else { final int length = chunk.length(); - final int matching = source.getValue().indexOf(chunk); + final String sourceValue = source.getValue(); + final String redactedValue = ctx.getRedactedValue(); + final int matching = (sourceValue == null) ? -1 : sourceValue.indexOf(chunk); final String pattern; - if (matching >= 0) { + if (matching >= 0 && redactedValue != null) { // if matches append the matching part from the redacted value - pattern = ctx.getRedactedValue().substring(matching, matching + length); + pattern = redactedValue.substring(matching, matching + length); } else { // otherwise redact the string pattern = SensitiveHandler.get().redactString(chunk); @@ -470,7 +502,7 @@ private TaintedValuePart( @Override public void write(final Context ctx, final JsonWriter writer) throws IOException { - if (value == null || value.isEmpty()) { + if (value == null) { return; } writer.beginObject(); @@ -483,7 +515,7 @@ public void write(final Context ctx, final JsonWriter writer) throws IOException } else { writer.name("value"); } - writer.value(value); + writeTruncableValue(writer, value); writer.endObject(); } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/FormattingAdapter.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/FormattingAdapter.java index 51a79738436..12d00ff344d 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/FormattingAdapter.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/FormattingAdapter.java @@ -1,5 +1,6 @@ package com.datadog.iast.model.json; +import com.squareup.moshi.FromJson; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.JsonReader; import java.io.IOException; @@ -8,6 +9,7 @@ public abstract class FormattingAdapter extends JsonAdapter { + @FromJson @Nullable @Override public final V fromJson(@Nonnull final JsonReader reader) throws IOException { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/SourceAdapter.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/SourceAdapter.java index 342bbdf122d..7f5ab7edc1f 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/SourceAdapter.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/SourceAdapter.java @@ -1,26 +1,26 @@ package com.datadog.iast.model.json; +import static com.datadog.iast.model.json.TruncationUtils.writeTruncableValue; + import com.datadog.iast.model.Source; import com.datadog.iast.model.json.AdapterFactory.Context; import com.datadog.iast.model.json.AdapterFactory.RedactionContext; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.JsonWriter; -import com.squareup.moshi.Moshi; import datadog.trace.api.Config; import java.io.IOException; -import java.util.Collections; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class SourceAdapter extends FormattingAdapter { - private final SourceTypeAdapter sourceAdapter; + private final SourceTypeAdapter sourceTypeAdapter; private final 
JsonAdapter defaultAdapter; private final JsonAdapter redactedAdapter; - public SourceAdapter(final Factory factory, final Moshi moshi) { - sourceAdapter = new SourceTypeAdapter(); - defaultAdapter = moshi.nextAdapter(factory, Source.class, Collections.emptySet()); + public SourceAdapter() { + sourceTypeAdapter = new SourceTypeAdapter(); + defaultAdapter = new DefaultSourceAdapter(); redactedAdapter = new RedactedSourceAdapter(); } @@ -38,6 +38,21 @@ public void toJson(@Nonnull final JsonWriter writer, final @Nullable Source sour } } + private class DefaultSourceAdapter extends FormattingAdapter { + + @Override + public void toJson(@Nonnull JsonWriter writer, @Nonnull Source source) throws IOException { + writer.beginObject(); + writer.name("origin"); + sourceTypeAdapter.toJson(writer, source.getOrigin()); + writer.name("name"); + writer.value(source.getName()); + writer.name("value"); + writeTruncableValue(writer, source.getValue()); + writer.endObject(); + } + } + private class RedactedSourceAdapter extends FormattingAdapter { @Override @@ -51,17 +66,18 @@ public void toJson(@Nonnull final JsonWriter writer, final @Nonnull Source sourc } } - private void toRedactedJson(final JsonWriter writer, final Source source, final String value) + private void toRedactedJson( + final JsonWriter writer, final Source source, @Nullable final String value) throws IOException { writer.beginObject(); writer.name("origin"); - sourceAdapter.toJson(writer, source.getOrigin()); + sourceTypeAdapter.toJson(writer, source.getOrigin()); writer.name("name"); writer.value(source.getName()); writer.name("redacted"); writer.value(true); writer.name("pattern"); - writer.value(value); + writeTruncableValue(writer, value); writer.endObject(); } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncatedVulnerabilities.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncatedVulnerabilities.java new file mode 100644 index 00000000000..fe767384ef2 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncatedVulnerabilities.java @@ -0,0 +1,19 @@ +package com.datadog.iast.model.json; + +import com.datadog.iast.model.Vulnerability; +import java.util.List; +import javax.annotation.Nullable; + +public class TruncatedVulnerabilities { + + @Nullable private final List vulnerabilities; + + public TruncatedVulnerabilities(@Nullable final List vulnerabilities) { + this.vulnerabilities = vulnerabilities; + } + + @Nullable + public List getVulnerabilities() { + return vulnerabilities; + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncatedVulnerabilitiesAdapter.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncatedVulnerabilitiesAdapter.java new file mode 100644 index 00000000000..ffe32b726bc --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncatedVulnerabilitiesAdapter.java @@ -0,0 +1,87 @@ +package com.datadog.iast.model.json; + +import com.datadog.iast.model.Evidence; +import com.datadog.iast.model.Location; +import com.datadog.iast.model.Vulnerability; +import com.datadog.iast.model.VulnerabilityType; +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.JsonWriter; +import com.squareup.moshi.Moshi; +import java.io.IOException; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +class TruncatedVulnerabilitiesAdapter extends FormattingAdapter { + + private static final String 
MAX_SIZE_EXCEEDED = "MAX SIZE EXCEEDED"; + + private final JsonAdapter vulnerabilityAdapter; + + public TruncatedVulnerabilitiesAdapter(Moshi moshi) { + this.vulnerabilityAdapter = new TruncatedVulnerabilityAdapter(moshi); + } + + @Override + public void toJson(@Nonnull JsonWriter writer, @Nullable TruncatedVulnerabilities value) + throws IOException { + if (value == null) { + writer.nullValue(); + return; + } + final List vulnerabilities = value.getVulnerabilities(); + writer.beginObject(); + if (vulnerabilities != null && !vulnerabilities.isEmpty()) { + writer.name("vulnerabilities"); + writer.beginArray(); + for (Vulnerability vulnerability : vulnerabilities) { + vulnerabilityAdapter.toJson(writer, vulnerability); + } + writer.endArray(); + } + writer.endObject(); + } + + private static class TruncatedVulnerabilityAdapter extends FormattingAdapter { + + private final JsonAdapter vulnerabilityTypeAdapter; + + private final JsonAdapter evidenceAdapter; + + private final JsonAdapter locationAdapter; + + public TruncatedVulnerabilityAdapter(Moshi moshi) { + this.vulnerabilityTypeAdapter = moshi.adapter(VulnerabilityType.class); + this.evidenceAdapter = new TruncatedEvidenceAdapter(); + this.locationAdapter = moshi.adapter(Location.class); + } + + @Override + public void toJson(@Nonnull JsonWriter writer, @Nullable Vulnerability value) + throws IOException { + if (value == null) { + return; + } + writer.beginObject(); + writer.name("type"); + vulnerabilityTypeAdapter.toJson(writer, value.getType()); + writer.name("evidence"); + evidenceAdapter.toJson(writer, value.getEvidence()); + writer.name("hash"); + writer.value(value.getHash()); + writer.name("location"); + locationAdapter.toJson(writer, value.getLocation()); + writer.endObject(); + } + } + + private static class TruncatedEvidenceAdapter extends FormattingAdapter { + @Override + public void toJson(@Nonnull JsonWriter writer, @Nullable Evidence evidence) throws IOException { + writer.beginObject(); + writer.name("value"); + writer.value(MAX_SIZE_EXCEEDED); + writer.endObject(); + } + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncationUtils.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncationUtils.java new file mode 100644 index 00000000000..7125ae86ead --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/TruncationUtils.java @@ -0,0 +1,26 @@ +package com.datadog.iast.model.json; + +import com.squareup.moshi.JsonWriter; +import datadog.trace.api.Config; +import java.io.IOException; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public final class TruncationUtils { + private static final int VALUE_MAX_LENGTH = Config.get().getIastTruncationMaxValueLength(); + private static final String TRUNCATED = "truncated"; + private static final String RIGHT = "right"; + + private TruncationUtils() {} + + public static void writeTruncableValue(@Nonnull JsonWriter writer, @Nullable String value) + throws IOException { + if (value != null && value.length() > VALUE_MAX_LENGTH) { + writer.value(value.substring(0, VALUE_MAX_LENGTH)); + writer.name(TRUNCATED); + writer.value(RIGHT); + } else { + writer.value(value); + } + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/VulnerabilityEncoding.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/VulnerabilityEncoding.java index e777953632a..59c6c1c5b62 100644 --- 
a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/VulnerabilityEncoding.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/model/json/VulnerabilityEncoding.java @@ -3,16 +3,37 @@ import com.datadog.iast.model.VulnerabilityBatch; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class VulnerabilityEncoding { + private static final Logger log = LoggerFactory.getLogger(VulnerabilityEncoding.class); + private static final int MAX_SPAN_TAG_SIZE = 25000; + static final Moshi MOSHI = new Moshi.Builder().add(new SourceTypeAdapter()).add(new AdapterFactory()).build(); private static final JsonAdapter BATCH_ADAPTER = MOSHI.adapter(VulnerabilityBatch.class); + private static final JsonAdapter TRUNCATED_VULNERABILITIES_ADAPTER = + MOSHI.adapter(TruncatedVulnerabilities.class); + public static String toJson(final VulnerabilityBatch value) { - return BATCH_ADAPTER.toJson(value); + try { + String json = BATCH_ADAPTER.toJson(value); + return json.getBytes().length > MAX_SPAN_TAG_SIZE + ? getExceededTagSizeJson(new TruncatedVulnerabilities(value.getVulnerabilities())) + : json; + } catch (Exception ex) { + log.debug("Vulnerability serialization error", ex); + return "{\"vulnerabilities\":[]}"; + } + } + + static String getExceededTagSizeJson(final TruncatedVulnerabilities truncatedVulnerabilities) { + // TODO report via telemetry + return TRUNCATED_VULNERABILITIES_ADAPTER.toJson(truncatedVulnerabilities); } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operation.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operation.java index f0d75012887..c21cec9d9c7 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operation.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operation.java @@ -1,7 +1,9 @@ package com.datadog.iast.overhead; +import javax.annotation.Nullable; + public interface Operation { - boolean hasQuota(final OverheadContext context); + boolean hasQuota(@Nullable final OverheadContext context); - boolean consumeQuota(final OverheadContext context); + boolean consumeQuota(@Nullable final OverheadContext context); } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operations.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operations.java index c9eab7b7237..e7bb35f1550 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operations.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/Operations.java @@ -1,5 +1,7 @@ package com.datadog.iast.overhead; +import javax.annotation.Nullable; + public class Operations { private Operations() {} @@ -7,7 +9,7 @@ private Operations() {} public static final Operation REPORT_VULNERABILITY = new Operation() { @Override - public boolean hasQuota(final OverheadContext context) { + public boolean hasQuota(@Nullable final OverheadContext context) { if (context == null) { return false; } @@ -15,7 +17,7 @@ public boolean hasQuota(final OverheadContext context) { } @Override - public boolean consumeQuota(final OverheadContext context) { + public boolean consumeQuota(@Nullable final OverheadContext context) { if (context == null) { return false; } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/OverheadController.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/OverheadController.java index 
26dd0b9d9fe..44f62063030 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/OverheadController.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/overhead/OverheadController.java @@ -12,7 +12,8 @@ import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import datadog.trace.util.AgentTaskScheduler; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -24,9 +25,9 @@ public interface OverheadController { int releaseRequest(); - boolean hasQuota(final Operation operation, final AgentSpan span); + boolean hasQuota(final Operation operation, @Nullable final AgentSpan span); - boolean consumeQuota(final Operation operation, final AgentSpan span); + boolean consumeQuota(final Operation operation, @Nullable final AgentSpan span); static OverheadController build(final Config config, final AgentTaskScheduler scheduler) { final OverheadControllerImpl result = new OverheadControllerImpl(config, scheduler); @@ -68,7 +69,7 @@ public int releaseRequest() { } @Override - public boolean hasQuota(final Operation operation, final AgentSpan span) { + public boolean hasQuota(final Operation operation, @Nullable final AgentSpan span) { final boolean result = delegate.hasQuota(operation, span); if (LOGGER.isDebugEnabled()) { LOGGER.debug( @@ -82,7 +83,7 @@ public boolean hasQuota(final Operation operation, final AgentSpan span) { } @Override - public boolean consumeQuota(final Operation operation, final AgentSpan span) { + public boolean consumeQuota(final Operation operation, @Nullable final AgentSpan span) { final boolean result = delegate.consumeQuota(operation, span); if (LOGGER.isDebugEnabled()) { LOGGER.debug( @@ -103,7 +104,7 @@ public void reset() { } } - private int getAvailableQuote(final AgentSpan span) { + private int getAvailableQuote(@Nullable final AgentSpan span) { final OverheadContext context = delegate.getContext(span); return context == null ? 
-1 : context.getAvailableQuota(); } @@ -117,13 +118,14 @@ class OverheadControllerImpl implements OverheadController { final NonBlockingSemaphore availableRequests; - final AtomicInteger executedRequests = new AtomicInteger(0); + final AtomicLong cumulativeCounter; final OverheadContext globalContext = new OverheadContext(); public OverheadControllerImpl(final Config config, final AgentTaskScheduler taskScheduler) { sampling = computeSamplingParameter(config.getIastRequestSampling()); availableRequests = maxConcurrentRequests(config.getIastMaxConcurrentRequests()); + cumulativeCounter = new AtomicLong(sampling); if (taskScheduler != null) { taskScheduler.scheduleAtFixedRate( this::reset, 2 * RESET_PERIOD_SECONDS, RESET_PERIOD_SECONDS, TimeUnit.SECONDS); @@ -132,11 +134,14 @@ public OverheadControllerImpl(final Config config, final AgentTaskScheduler task @Override public boolean acquireRequest() { - if (executedRequests.incrementAndGet() % sampling != 0) { - // Skipped by sampling - return false; + long prevValue = cumulativeCounter.getAndAdd(sampling); + long newValue = prevValue + sampling; + if (newValue / 100 == prevValue / 100 + 1) { + // Sample request + return availableRequests.acquire(); } - return availableRequests.acquire(); + // Skipped by sampling + return false; } @Override @@ -145,16 +150,17 @@ public int releaseRequest() { } @Override - public boolean hasQuota(final Operation operation, final AgentSpan span) { + public boolean hasQuota(final Operation operation, @Nullable final AgentSpan span) { return operation.hasQuota(getContext(span)); } @Override - public boolean consumeQuota(final Operation operation, final AgentSpan span) { + public boolean consumeQuota(final Operation operation, @Nullable final AgentSpan span) { return operation.consumeQuota(getContext(span)); } - public OverheadContext getContext(final AgentSpan span) { + @Nullable + public OverheadContext getContext(@Nullable final AgentSpan span) { final RequestContext requestContext = span != null ? span.getRequestContext() : null; if (requestContext != null) { IastRequestContext iastRequestContext = requestContext.getData(RequestContextSlot.IAST); @@ -165,14 +171,14 @@ public OverheadContext getContext(final AgentSpan span) { static int computeSamplingParameter(final float pct) { if (pct >= 100) { - return 1; + return 100; } if (pct <= 0) { // We don't support disabling IAST by setting it, so we set it to 100%. // TODO: We probably want a warning here. 
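// The acquireRequest/computeSamplingParameter changes above swap the old "every Nth request"
// sampler (Math.round(100 / pct) plus a modulo test) for a cumulative counter: the sampling
// percentage is added on every request, and a request is sampled each time the running total
// crosses the next multiple of 100. A minimal, self-contained sketch of that accumulator;
// CumulativeSamplerSketch is hypothetical and assumes the percentage is already clamped to
// 1..100, as computeSamplingParameter now guarantees.
import java.util.concurrent.atomic.AtomicLong;

final class CumulativeSamplerSketch {

  private final int samplingPercentage; // e.g. 30 means "sample roughly 30% of requests"
  private final AtomicLong cumulative;

  CumulativeSamplerSketch(final int samplingPercentage) {
    this.samplingPercentage = samplingPercentage;
    // Seeding with the percentage mirrors the patch; it only shifts when the first sample fires.
    this.cumulative = new AtomicLong(samplingPercentage);
  }

  boolean sampleRequest() {
    final long previous = cumulative.getAndAdd(samplingPercentage);
    final long updated = previous + samplingPercentage;
    // Sample exactly once every time the running total crosses the next multiple of 100.
    return updated / 100 == previous / 100 + 1;
  }

  public static void main(final String[] args) {
    final CumulativeSamplerSketch sampler = new CumulativeSamplerSketch(30);
    int sampled = 0;
    for (int i = 0; i < 1_000; i++) {
      if (sampler.sampleRequest()) {
        sampled++;
      }
    }
    System.out.println(sampled + " of 1000 requests sampled"); // ~300 with a 30% rate
  }
}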
- return 1; + return 100; } - return Math.round(100 / pct); + return (int) pct; } static NonBlockingSemaphore maxConcurrentRequests(final int max) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/PropagationModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/PropagationModuleImpl.java index 64122f158a0..ea0abe982b3 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/PropagationModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/PropagationModuleImpl.java @@ -1,8 +1,8 @@ package com.datadog.iast.propagation; -import static com.datadog.iast.model.Range.NOT_MARKED; import static com.datadog.iast.taint.Ranges.highestPriorityRange; -import static com.datadog.iast.taint.Tainteds.canBeTainted; +import static com.datadog.iast.util.ObjectVisitor.State.CONTINUE; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; @@ -10,297 +10,499 @@ import com.datadog.iast.taint.Ranges; import com.datadog.iast.taint.TaintedObject; import com.datadog.iast.taint.TaintedObjects; -import datadog.trace.api.iast.SourceTypes; +import com.datadog.iast.taint.Tainteds; +import com.datadog.iast.util.ObjectVisitor; +import datadog.trace.api.Config; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; -import java.util.Collection; -import java.util.List; -import java.util.Map; +import java.util.function.Predicate; +import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.jetbrains.annotations.Contract; public class PropagationModuleImpl implements PropagationModule { + /** Prevent copy of values bigger than this threshold */ + private static final int MAX_VALUE_LENGTH = Config.get().getIastTruncationMaxValueLength(); + @Override - public void taintIfInputIsTainted(@Nullable final Object toTaint, @Nullable final Object input) { - if (toTaint == null || input == null) { - return; - } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - final Source source = highestPriorityTaintedSource(taintedObjects, input); - if (source != null) { - taintObject(taintedObjects, toTaint, source); - } + public void taint(@Nullable final Object target, final byte origin) { + taint(target, origin, null); } @Override - public void taintIfInputIsTainted(@Nullable final String toTaint, @Nullable final Object input) { - if (!canBeTainted(toTaint) || input == null) { - return; - } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - final Source source = highestPriorityTaintedSource(taintedObjects, input); - if (source != null) { - taintString(taintedObjects, toTaint, source); - } + public void taint( + @Nullable final Object target, final byte origin, @Nullable final CharSequence name) { + taint(target, origin, name, sourceValue(target)); } @Override - public void taintIfInputIsTainted( + public void taint( + @Nullable final Object target, final byte origin, - @Nullable final String name, - @Nullable final String toTaint, - @Nullable final Object input) { - if (!canBeTainted(toTaint) || input == null) { + @Nullable final CharSequence name, + @Nullable final CharSequence value) { + if (!canBeTainted(target)) { return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - if (isTainted(taintedObjects, input)) { - 
taintString(taintedObjects, toTaint, new Source(origin, name, toTaint)); - } + taint(LazyContext.build(), target, origin, name, value); } @Override - public void taintIfInputIsTainted( - final byte origin, - @Nullable final String name, - @Nullable final Collection toTaintCollection, - @Nullable final Object input) { - if (toTaintCollection == null || toTaintCollection.isEmpty() || input == null) { - return; - } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - if (isTainted(taintedObjects, input)) { - for (final String toTaint : toTaintCollection) { - if (canBeTainted(toTaint)) { - taintString(taintedObjects, toTaint, new Source(origin, name, toTaint)); - } - } - } + public void taint( + @Nullable final IastContext ctx, @Nullable final Object target, final byte origin) { + taint(ctx, target, origin, null); } @Override - public void taintIfInputIsTainted( + public void taint( + @Nullable final IastContext ctx, + @Nullable final Object target, final byte origin, - @Nullable final Collection toTaintCollection, - @Nullable final Object input) { - if (toTaintCollection == null || toTaintCollection.isEmpty() || input == null) { - return; - } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - if (isTainted(taintedObjects, input)) { - for (final String toTaint : toTaintCollection) { - if (canBeTainted(toTaint)) { - taintString(taintedObjects, toTaint, new Source(origin, toTaint, toTaint)); - } - } - } + @Nullable final CharSequence name) { + taint(ctx, target, origin, name, sourceValue(target)); } @Override - public void taintIfInputIsTainted( + public void taint( + @Nullable final IastContext ctx, + @Nullable final Object target, final byte origin, - @Nullable final List> toTaintCollection, - @Nullable final Object input) { - if (toTaintCollection == null || toTaintCollection.isEmpty() || input == null) { + @Nullable final CharSequence name, + @Nullable final CharSequence value) { + if (!canBeTainted(target)) { return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - if (isTainted(taintedObjects, input)) { - for (final Map.Entry entry : toTaintCollection) { - final String name = entry.getKey(); - if (canBeTainted(name)) { - taintString( - taintedObjects, name, new Source(SourceTypes.namedSource(origin), name, name)); - } - final String toTaint = entry.getValue(); - if (canBeTainted(toTaint)) { - taintString(taintedObjects, toTaint, new Source(origin, name, toTaint)); - } - } - } + internalTaint(ctx, target, new Source(origin, name, sourceValue(target, value)), NOT_MARKED); + } + + @Override + public void taintIfTainted(@Nullable final Object target, @Nullable final Object input) { + taintIfTainted(target, input, false, NOT_MARKED); } @Override - public void taintIfAnyInputIsTainted( - @Nullable final Object toTaint, @Nullable final Object... 
inputs) { - if (toTaint == null || inputs == null || inputs.length == 0) { + public void taintIfTainted( + @Nullable final Object target, @Nullable final Object input, boolean keepRanges, int mark) { + if (!canBeTainted(target) || !canBeTainted(input)) { return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - for (final Object input : inputs) { - final Source source = highestPriorityTaintedSource(taintedObjects, input); - if (source != null) { - taintObject(taintedObjects, toTaint, source); - return; - } - } + taintIfTainted(LazyContext.build(), target, input, keepRanges, mark); } @Override - public void taint(final byte source, @Nullable final String name, @Nullable final String value) { - if (!canBeTainted(value)) { + public void taintIfTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object input) { + taintIfTainted(ctx, target, input, false, NOT_MARKED); + } + + @Override + public void taintIfTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object input, + boolean keepRanges, + int mark) { + if (!canBeTainted(target) || !canBeTainted(input)) { return; } - final IastRequestContext ctx = IastRequestContext.get(); - if (ctx == null) { - return; + if (keepRanges) { + internalTaint(ctx, target, getRanges(ctx, input), mark); + } else { + internalTaint(ctx, target, highestPrioritySource(ctx, input), mark); } - final TaintedObjects taintedObjects = ctx.getTaintedObjects(); - taintedObjects.taintInputString(value, new Source(source, name, value)); } @Override - public void taint( - @Nullable final Object ctx_, - final byte source, - @Nullable final String name, - @Nullable final String value) { - if (ctx_ == null || !canBeTainted(value)) { + public void taintIfTainted( + @Nullable final Object target, @Nullable final Object input, final byte origin) { + taintIfTainted(target, input, origin, null); + } + + @Override + public void taintIfTainted( + @Nullable final Object target, + @Nullable final Object input, + final byte origin, + @Nullable final CharSequence name) { + taintIfTainted(target, input, origin, name, sourceValue(target)); + } + + @Override + public void taintIfTainted( + @Nullable final Object target, + @Nullable final Object input, + final byte origin, + @Nullable final CharSequence name, + @Nullable final CharSequence value) { + if (!canBeTainted(target) || !canBeTainted(input)) { return; } - final IastRequestContext ctx = (IastRequestContext) ctx_; - final TaintedObjects taintedObjects = ctx.getTaintedObjects(); - taintedObjects.taintInputString(value, new Source(source, name, value)); + taintIfTainted(LazyContext.build(), target, input, origin, name, value); + } + + @Override + public void taintIfTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object input, + final byte origin) { + taintIfTainted(ctx, target, input, origin, null); + } + + @Override + public void taintIfTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object input, + final byte origin, + @Nullable final CharSequence name) { + taintIfTainted(ctx, target, input, origin, name, sourceValue(target)); } @Override - public void taintObjectIfInputIsTaintedKeepingRanges( - @Nullable final Object toTaint, @Nullable Object input) { - if (toTaint == null || input == null) { + public void taintIfTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object input, + 
final byte origin, + @Nullable final CharSequence name, + @Nullable final CharSequence value) { + if (!canBeTainted(target) || !canBeTainted(input)) { return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - final Range[] ranges = getTaintedRanges(taintedObjects, input); - if (ranges != null && ranges.length > 0) { - taintedObjects.taint(toTaint, ranges); + if (isTainted(ctx, input)) { + internalTaint(ctx, target, new Source(origin, name, sourceValue(target, value)), NOT_MARKED); } } @Override - public void taintObject(final byte origin, @Nullable final Object toTaint) { - if (toTaint == null) { - return; - } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(false); - if (taintedObjects == null) { + public void taintIfAnyTainted(@Nullable final Object target, @Nullable final Object[] inputs) { + taintIfAnyTainted(target, inputs, false, NOT_MARKED); + } + + @Override + public void taintIfAnyTainted( + @Nullable final Object target, + @Nullable final Object[] inputs, + final boolean keepRanges, + final int mark) { + if (!canBeTainted(target) || !canBeTainted(inputs)) { return; } - final Source source = new Source(origin, null, null); - taintObject(taintedObjects, toTaint, source); + taintIfAnyTainted(LazyContext.build(), target, inputs, keepRanges, mark); } @Override - public void taintObjects(final byte origin, @Nullable final Object[] toTaintArray) { - if (toTaintArray == null || toTaintArray.length == 0) { + public void taintIfAnyTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object[] inputs) { + taintIfAnyTainted(ctx, target, inputs, false, NOT_MARKED); + } + + @Override + public void taintIfAnyTainted( + @Nullable final IastContext ctx, + @Nullable final Object target, + @Nullable final Object[] inputs, + final boolean keepRanges, + final int mark) { + if (!canBeTainted(target) || !canBeTainted(inputs)) { return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - final Source source = new Source(origin, null, null); - for (final Object toTaint : toTaintArray) { - taintObject(taintedObjects, toTaint, source); + if (keepRanges) { + final Range[] ranges = getRangesInArray(ctx, inputs); + if (ranges != null) { + internalTaint(ctx, target, ranges, mark); + } + } else { + final Source source = highestPrioritySourceInArray(ctx, inputs); + if (source != null) { + internalTaint(ctx, target, source, mark); + } } } @Override - public boolean isTainted(@Nullable Object obj) { - if (obj instanceof Taintable) { - return ((Taintable) obj).$DD$isTainted(); - } - - if (obj == null) { - return false; + public void taintDeeply( + @Nullable final Object target, final byte origin, final Predicate> classFilter) { + if (!canBeTainted(target)) { + return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - return taintedObjects.get(obj) != null; + taintDeeply(LazyContext.build(), target, origin, classFilter); } @Override - public void taintObjects( - final byte origin, @Nullable final Collection toTaintCollection) { - if (toTaintCollection == null || toTaintCollection.isEmpty()) { + public void taintDeeply( + @Nullable final IastContext ctx, + @Nullable final Object target, + final byte origin, + final Predicate> classFilter) { + if (!canBeTainted(target)) { return; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - final Source source = new Source(origin, null, null); - for (final Object 
toTaint : toTaintCollection) { - taintObject(taintedObjects, toTaint, source); + final TaintedObjects to = getTaintedObjects(ctx); + if (to == null) { + return; + } + if (target instanceof CharSequence) { + internalTaint(ctx, target, new Source(origin, null, sourceValue(target)), NOT_MARKED); + } else { + ObjectVisitor.visit(target, new TaintingVisitor(to, origin), classFilter); } } + @Nullable @Override - public void taint( - byte origin, @Nullable String name, @Nullable String value, @Nullable Taintable t) { - if (t == null) { - return; - } - t.$$DD$setSource(new Source(origin, name, value)); + public Taintable.Source findSource(@Nullable final Object target) { + return target == null ? null : findSource(LazyContext.build(), target); } + @Nullable @Override - public Taintable.Source firstTaintedSource(@Nullable final Object input) { - if (input == null) { + public Taintable.Source findSource( + @Nullable final IastContext ctx, @Nullable final Object target) { + if (target == null) { return null; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - return highestPriorityTaintedSource(taintedObjects, input); + return highestPrioritySource(ctx, target); } @Override - public void taintIfInputIsTaintedWithMarks( - @Nullable final String toTaint, @Nullable final Object input, final int mark) { - if (!canBeTainted(toTaint) || input == null) { - return; + public boolean isTainted(@Nullable final Object target) { + return target != null && isTainted(LazyContext.build(), target); + } + + @Override + public boolean isTainted(@Nullable final IastContext ctx, @Nullable final Object target) { + return target != null && findSource(ctx, target) != null; + } + + /** + * Compares origin and value to check if they are the same reference in order to prevent retaining + * references + * + * @see #sourceValue(Object) + */ + @Nullable + private static CharSequence sourceValue( + @Nullable final Object origin, @Nullable final CharSequence value) { + if (value != null && origin == value) { + return sourceValue(value); + } + return value; + } + + /** + * This method will prevent the code from creating a strong reference to what should remain weak + */ + @Nullable + private static CharSequence sourceValue(@Nullable final Object target) { + if (target instanceof String) { + final String string = (String) target; + if (MAX_VALUE_LENGTH > string.length()) { + return String.copyValueOf(string.toCharArray()); + } else { + final char[] chars = new char[MAX_VALUE_LENGTH]; + string.getChars(0, MAX_VALUE_LENGTH, chars, 0); + return String.copyValueOf(chars); + } + } else if (target instanceof CharSequence) { + final CharSequence charSequence = (CharSequence) target; + if (MAX_VALUE_LENGTH > charSequence.length()) { + return charSequence.toString(); + } else { + final CharSequence subSequence = charSequence.subSequence(0, MAX_VALUE_LENGTH); + return subSequence.toString(); + } + } + return null; + } + + @Contract("null -> false") + private static boolean canBeTainted(@Nullable final Object target) { + if (target == null) { + return false; } - final TaintedObjects taintedObjects = TaintedObjects.activeTaintedObjects(true); - final Range[] ranges = getTaintedRanges(taintedObjects, input); - if (ranges != null && ranges.length > 0) { - Range priorityRange = highestPriorityRange(ranges); - taintedObjects.taintInputString( - toTaint, priorityRange.getSource(), priorityRange.getMarks() | mark); + if (target instanceof CharSequence) { + return Tainteds.canBeTainted((CharSequence) target); } + 
return true; } - private static void taintString( - final TaintedObjects taintedObjects, final String toTaint, final Source source) { - taintedObjects.taintInputString(toTaint, source); + @Contract("null -> false") + private static boolean canBeTainted(@Nullable final Object[] target) { + if (target == null || target.length == 0) { + return false; + } + return true; } - private static void taintObject( - final TaintedObjects taintedObjects, final Object toTaint, final Source source) { - if (toTaint instanceof Taintable) { - ((Taintable) toTaint).$$DD$setSource(source); - } else { - taintedObjects.taintInputObject(toTaint, source); + @Nullable + private static TaintedObjects getTaintedObjects(final @Nullable IastContext ctx) { + IastRequestContext iastCtx = null; + if (ctx instanceof IastRequestContext) { + iastCtx = (IastRequestContext) ctx; + } else if (ctx instanceof LazyContext) { + iastCtx = ((LazyContext) ctx).getDelegate(); + } + return iastCtx == null ? null : iastCtx.getTaintedObjects(); + } + + @Nullable + private static Range[] getRangesInArray( + final @Nullable IastContext ctx, final @Nonnull Object[] objects) { + for (final Object object : objects) { + final Range[] ranges = getRanges(ctx, object); + if (ranges != null) { + return ranges; + } + } + return null; + } + + @Nullable + private static Range[] getRanges(final @Nullable IastContext ctx, final @Nonnull Object object) { + if (object instanceof Taintable) { + final Source source = highestPrioritySource(ctx, object); + if (source == null) { + return null; + } else { + return new Range[] {new Range(0, Integer.MAX_VALUE, source, NOT_MARKED)}; + } } + final TaintedObjects to = getTaintedObjects(ctx); + if (to == null) { + return null; + } + final TaintedObject tainted = to.get(object); + return tainted == null ? null : tainted.getRanges(); } - private static boolean isTainted(final TaintedObjects taintedObjects, final Object object) { - return highestPriorityTaintedSource(taintedObjects, object) != null; + @Nullable + private static Source highestPrioritySourceInArray( + final @Nullable IastContext ctx, final @Nonnull Object[] objects) { + for (final Object object : objects) { + final Source source = highestPrioritySource(ctx, object); + if (source != null) { + return source; + } + } + return null; } - private static Source highestPriorityTaintedSource( - final TaintedObjects taintedObjects, final Object object) { + @Nullable + private static Source highestPrioritySource( + final @Nullable IastContext ctx, final @Nonnull Object object) { if (object instanceof Taintable) { return (Source) ((Taintable) object).$$DD$getSource(); } else { - final TaintedObject tainted = taintedObjects.get(object); - final Range[] ranges = tainted == null ? null : tainted.getRanges(); + final Range[] ranges = getRanges(ctx, object); return ranges != null && ranges.length > 0 ? 
highestPriorityRange(ranges).getSource() : null; } } - private static Range[] getTaintedRanges( - final TaintedObjects taintedObjects, final Object object) { - if (object instanceof Taintable) { - Source source = (Source) ((Taintable) object).$$DD$getSource(); - if (source == null) { - return null; + private static void internalTaint( + @Nullable final IastContext ctx, + @Nonnull final Object value, + @Nullable final Source source, + int mark) { + if (source == null) { + return; + } + if (value instanceof Taintable) { + ((Taintable) value).$$DD$setSource(source); + } else { + final TaintedObjects to = getTaintedObjects(ctx); + if (to == null) { + return; + } + if (value instanceof CharSequence) { + to.taint(value, Ranges.forCharSequence((CharSequence) value, source, mark)); } else { - return Ranges.forObject(source, NOT_MARKED); + to.taint(value, Ranges.forObject(source, mark)); } + } + } + + private static void internalTaint( + @Nullable final IastContext ctx, + @Nonnull final Object value, + @Nullable final Range[] ranges, + final int mark) { + if (ranges == null || ranges.length == 0) { + return; + } + if (value instanceof Taintable) { + ((Taintable) value).$$DD$setSource(ranges[0].getSource()); } else { - final TaintedObject tainted = taintedObjects.get(object); - return tainted == null ? null : tainted.getRanges(); + final TaintedObjects to = getTaintedObjects(ctx); + if (to != null) { + final Range[] markedRanges = markRanges(ranges, mark); + to.taint(value, markedRanges); + } + } + } + + @Nonnull + private static Range[] markRanges(@Nonnull final Range[] ranges, final int mark) { + if (mark == NOT_MARKED) { + return ranges; + } + final Range[] result = new Range[ranges.length]; + for (int i = 0; i < ranges.length; i++) { + final Range range = ranges[i]; + final int newMark = range.getMarks() | mark; + result[i] = new Range(range.getStart(), range.getLength(), range.getSource(), newMark); + } + return result; + } + + private static class LazyContext implements IastContext { + + private boolean fetched; + @Nullable private IastRequestContext delegate; + + @Nullable + private IastRequestContext getDelegate() { + if (!fetched) { + fetched = true; + delegate = IastRequestContext.get(); + } + return delegate; + } + + public static IastContext build() { + return new LazyContext(); + } + } + + private static class TaintingVisitor implements ObjectVisitor.Visitor { + + private final TaintedObjects taintedObjects; + private final byte origin; + + private TaintingVisitor(@Nonnull final TaintedObjects taintedObjects, final byte origin) { + this.taintedObjects = taintedObjects; + this.origin = origin; + } + + @Nonnull + @Override + public ObjectVisitor.State visit(@Nonnull final String path, @Nonnull final Object value) { + if (value instanceof CharSequence) { + final CharSequence charSequence = (CharSequence) value; + if (canBeTainted(charSequence)) { + final Source source = new Source(origin, path, sourceValue(value)); + taintedObjects.taint( + charSequence, Ranges.forCharSequence(charSequence, source, NOT_MARKED)); + } + } + return CONTINUE; } } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/StringModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/StringModuleImpl.java index 36f4c1897c4..a93d3c53e8e 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/StringModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/propagation/StringModuleImpl.java @@ -9,19 +9,19 @@ import 
com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; -import com.datadog.iast.model.Source; import com.datadog.iast.taint.Ranges; +import com.datadog.iast.taint.Ranges.RangeList; +import com.datadog.iast.taint.Ranges.RangesProvider; import com.datadog.iast.taint.TaintedObject; import com.datadog.iast.taint.TaintedObjects; import com.datadog.iast.util.Ranged; import datadog.trace.api.iast.propagation.StringModule; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedList; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.regex.Matcher; @@ -39,6 +39,7 @@ public class StringModuleImpl implements StringModule { private static final int NULL_STR_LENGTH = "null".length(); + @SuppressWarnings("NullAway") // NullAway fails with taintedLeft and taintedRight checks @Override public void onStringConcat( @Nonnull final String left, @Nullable final String right, @Nonnull final String result) { @@ -153,7 +154,7 @@ public void onStringConcatFactory( final TaintedObjects taintedObjects = ctx.getTaintedObjects(); final Map sourceRanges = new HashMap<>(); - int rangeCount = 0; + long rangeCount = 0; for (int i = 0; i < args.length; i++) { final TaintedObject to = getTainted(taintedObjects, args[i]); if (to != null) { @@ -166,17 +167,19 @@ public void onStringConcatFactory( return; } - final Range[] targetRanges = new Range[rangeCount]; + final Range[] targetRanges = Ranges.newArray(rangeCount); int offset = 0, rangeIndex = 0; for (int item : recipeOffsets) { if (item < 0) { - offset += (-item); + offset += -item; } else { final String argument = args[item]; final Range[] ranges = sourceRanges.get(item); if (ranges != null) { - Ranges.copyShift(ranges, targetRanges, rangeIndex, offset); - rangeIndex += ranges.length; + rangeIndex = insertRange(targetRanges, ranges, offset, rangeIndex); + if (rangeIndex >= targetRanges.length) { + break; + } } offset += getToStringLength(argument); } @@ -212,7 +215,7 @@ public void onStringSubSequence( @Override public void onStringJoin( - @Nullable String result, @Nonnull CharSequence delimiter, @Nonnull CharSequence... 
elements) { + @Nullable String result, @Nonnull CharSequence delimiter, @Nonnull CharSequence[] elements) { if (!canBeTainted(result)) { return; } @@ -225,33 +228,42 @@ public void onStringJoin( if (getTainted(taintedObjects, result) != null) { return; } - List newRanges = new ArrayList<>(); - int pos = 0; - // Delimiter info - Range[] delimiterRanges = getRanges(getTainted(taintedObjects, delimiter)); - boolean delimiterHasRanges = delimiterRanges.length > 0; - int delimiterLength = delimiter.length(); - + final RangesProvider elementRanges = rangesProviderFor(taintedObjects, elements); + final Range[] delimiterRanges = getRanges(getTainted(taintedObjects, delimiter)); + final long rangeCount = + elementRanges.rangeCount() + ((long) (elements.length - 1) * delimiterRanges.length); + if (rangeCount == 0) { + return; + } + final Range[] targetRanges = Ranges.newArray(rangeCount); + int delimiterLength = getToStringLength(delimiter), offset = 0, rangeIndex = 0; for (int i = 0; i < elements.length; i++) { - CharSequence element = elements[i]; - pos = - getPositionAndUpdateRangesInStringJoin( - taintedObjects, - newRanges, - pos, - delimiterRanges, - delimiterLength, - element, - delimiterHasRanges && i < elements.length - 1); - } - if (!newRanges.isEmpty()) { - taintedObjects.taint(result, newRanges.toArray(new Range[0])); + // insert element ranges + final CharSequence element = elements[i]; + final Range[] ranges = elementRanges.ranges(element); + if (ranges != null) { + rangeIndex = insertRange(targetRanges, ranges, offset, rangeIndex); + if (rangeIndex >= targetRanges.length) { + break; + } + } + offset += getToStringLength(element); + + if (i < elements.length - 1) { + // add delimiter ranges + rangeIndex = insertRange(targetRanges, delimiterRanges, offset, rangeIndex); + if (rangeIndex >= targetRanges.length) { + break; + } + offset += delimiterLength; + } } + taintedObjects.taint(result, targetRanges); } @Override @SuppressFBWarnings("ES_COMPARING_PARAMETER_STRING_WITH_EQ") - public void onStringRepeat(String self, int count, String result) { + public void onStringRepeat(@Nonnull String self, int count, @Nonnull String result) { if (!canBeTainted(self) || !canBeTainted(result) || self == result) { return; } @@ -264,9 +276,13 @@ public void onStringRepeat(String self, int count, String result) { if (selfRanges.length == 0) { return; } - final Range[] ranges = new Range[selfRanges.length * count]; + final Range[] ranges = Ranges.newArray(selfRanges.length * (long) count); + int rangeIndex = 0; for (int i = 0; i < count; i++) { - Ranges.copyShift(selfRanges, ranges, i * selfRanges.length, i * self.length()); + rangeIndex = insertRange(ranges, selfRanges, i * self.length(), rangeIndex); + if (rangeIndex >= ranges.length) { + break; + } } taintedObjects.taint(result, ranges); } @@ -342,40 +358,18 @@ private void stringCaseChangedWithReducedSize( taintedObjects.taint(result, newRanges); } - /** - * Iterates over the element and delimiter ranges (if necessary) to update them and calculate the - * new pos value - */ - private static int getPositionAndUpdateRangesInStringJoin( - TaintedObjects taintedObjects, - List newRanges, - int pos, - Range[] delimiterRanges, - int delimiterLength, - CharSequence element, - boolean addDelimiterRanges) { - if (canBeTainted(element)) { - TaintedObject elementTainted = taintedObjects.get(element); - if (elementTainted != null) { - Range[] elementRanges = elementTainted.getRanges(); - if (elementRanges.length > 0) { - for (Range range : elementRanges) { - 
newRanges.add(pos == 0 ? range : range.shift(pos)); - } - } - } - } - pos += getToStringLength(element); - if (addDelimiterRanges) { - for (Range range : delimiterRanges) { - newRanges.add(range.shift(pos)); - } + /** Inserts the range in the selected position and returns the new position for further ranges */ + private static int insertRange( + final Range[] targetRanges, final Range[] ranges, final int offset, final int rangeIndex) { + if (ranges.length == 0) { + return rangeIndex; } - pos += delimiterLength; - return pos; + final int count = Math.min(targetRanges.length - rangeIndex, ranges.length); + Ranges.copyShift(ranges, targetRanges, rangeIndex, offset, count); + return rangeIndex + count; } - private static Range[] getRanges(final TaintedObject taintedObject) { + private static Range[] getRanges(@Nullable final TaintedObject taintedObject) { return taintedObject == null ? EMPTY : taintedObject.getRanges(); } @@ -454,7 +448,7 @@ public void onStringFormat( return; } final TaintedObjects to = ctx.getTaintedObjects(); - final Ranges.RangesProvider paramRangesProvider = rangesProviderFor(to, parameters); + final RangesProvider paramRangesProvider = rangesProviderFor(to, parameters); int rangeCount = paramRangesProvider.rangeCount(); final Deque formatRanges = new LinkedList<>(); final TaintedObject formatTainted = to.get(format); @@ -467,7 +461,7 @@ public void onStringFormat( } // params can appear zero or multiple times in the pattern so the final number of ranges is // unknown beforehand - final List finalRanges = new LinkedList<>(); + final RangeList finalRanges = new RangeList(); final Matcher matcher = FORMAT_PATTERN.matcher(format); int offset = 0, paramIndex = 0; while (matcher.find()) { @@ -493,11 +487,53 @@ public void onStringFormat( addParameterTaintedRanges( placeholderRange, parameter, formattedValue, shift, paramRanges, finalRanges); offset += (formattedValue.length() - placeholder.length()); + if (finalRanges.isFull()) { + break; + } } addFormatTaintedRanges( END, offset, formatRanges, finalRanges); // add remaining ranges from the format if (!finalRanges.isEmpty()) { - to.taint(result, finalRanges.toArray(new Range[0])); + to.taint(result, finalRanges.toArray()); + } + } + + @Override + public void onStringFormat( + @Nonnull final Iterable literals, + @Nonnull final Object[] parameters, + @Nonnull final String result) { + if (!canBeTainted(result)) { + return; + } + final IastRequestContext ctx = IastRequestContext.get(); + if (ctx == null) { + return; + } + final TaintedObjects to = ctx.getTaintedObjects(); + final RangesProvider paramRangesProvider = rangesProviderFor(to, parameters); + if (paramRangesProvider.rangeCount() == 0) { + return; + } + // since we might join ranges the final number is unknown beforehand + final RangeList finalRanges = new RangeList(); + int offset = 0, paramIndex = 0; + for (final Iterator it = literals.iterator(); it.hasNext(); ) { + final String literal = it.next(); + offset += literal.length(); + if (it.hasNext() && paramIndex < parameters.length) { + final Object parameter = parameters[paramIndex++]; + final Range[] parameterRanges = paramRangesProvider.ranges(parameter); + final String formatted = String.valueOf(parameter); + addParameterTaintedRanges(null, parameter, formatted, offset, parameterRanges, finalRanges); + offset += formatted.length(); + } + if (finalRanges.isFull()) { + break; + } + } + if (!finalRanges.isEmpty()) { + to.taint(result, finalRanges.toArray()); } } @@ -537,12 +573,12 @@ public void onSplit(@Nonnull String 
self, @Nonnull String[] result) { * @param finalRanges result with all ranges */ private void addParameterTaintedRanges( - final Range placeholderRange, + @Nullable final Range placeholderRange, final Object param, final String formatted, final int offset, - final Range[] ranges, - /* out */ final List finalRanges) { + @Nullable final Range[] ranges, + /* out */ final RangeList finalRanges) { if (ranges != null && ranges.length > 0) { // only shift ranges if they are character sequences of the same length, otherwise taint the // whole thing @@ -554,7 +590,6 @@ private void addParameterTaintedRanges( finalRanges.add(Ranges.copyWithPosition(ranges[0], offset, formatted.length())); } } else if (placeholderRange != null) { - final Source source = placeholderRange.getSource(); finalRanges.add(Ranges.copyWithPosition(placeholderRange, offset, formatted.length())); } } @@ -568,11 +603,12 @@ private void addParameterTaintedRanges( * @param finalRanges result with all ranges * @return tainted range of the placeholder or {@code null} if not tainted */ + @Nullable private Range addFormatTaintedRanges( final Ranged placeholderPos, final int offset, final Deque ranges, - /* out */ final List finalRanges) { + /* out */ final RangeList finalRanges) { Range formatRange; int end = placeholderPos.getStart() + placeholderPos.getLength(); Range placeholderRange = null; diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/AbstractRegexTokenizer.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/AbstractRegexTokenizer.java index 58a1e8aaec3..54c73669be3 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/AbstractRegexTokenizer.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/AbstractRegexTokenizer.java @@ -4,11 +4,12 @@ import java.util.NoSuchElementException; import java.util.regex.Matcher; import java.util.regex.Pattern; +import javax.annotation.Nullable; public abstract class AbstractRegexTokenizer implements SensitiveHandler.Tokenizer { protected final Matcher matcher; - private Ranged current; + @Nullable private Ranged current; protected AbstractRegexTokenizer(final Pattern pattern, final String evidence) { matcher = pattern.matcher(evidence); diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SensitiveHandlerImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SensitiveHandlerImpl.java index 69f01f9b1c8..d62fdf8c95c 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SensitiveHandlerImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SensitiveHandlerImpl.java @@ -45,6 +45,7 @@ public SensitiveHandlerImpl() { tokenizers.put(VulnerabilityType.COMMAND_INJECTION, CommandRegexpTokenizer::new); tokenizers.put(VulnerabilityType.SSRF, UrlRegexpTokenizer::new); tokenizers.put(VulnerabilityType.UNVALIDATED_REDIRECT, UrlRegexpTokenizer::new); + tokenizers.put(VulnerabilityType.XSS, TaintedRangeBasedTokenizer::new); } @Override @@ -75,7 +76,7 @@ public Tokenizer tokenizeEvidence( return supplier.tokenizerFor(evidence); } - private int computeLength(final String value) { + private int computeLength(@Nullable final String value) { if (value == null || value.isEmpty()) { return 0; } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SqlRegexpTokenizer.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SqlRegexpTokenizer.java index 9082979e890..6c87aaf7a87 
100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SqlRegexpTokenizer.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/SqlRegexpTokenizer.java @@ -120,10 +120,6 @@ public static Dialect fromEvidence(final Evidence evidence) { return ANSI; } - public static Dialect current() { - return ANSI; - } - public Pattern buildPattern() { return pattern.get(); } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/TaintedRangeBasedTokenizer.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/TaintedRangeBasedTokenizer.java new file mode 100644 index 00000000000..b2cff3356b4 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sensitive/TaintedRangeBasedTokenizer.java @@ -0,0 +1,61 @@ +package com.datadog.iast.sensitive; + +import com.datadog.iast.model.Evidence; +import com.datadog.iast.model.Range; +import com.datadog.iast.taint.Ranges; +import com.datadog.iast.util.Ranged; +import java.util.NoSuchElementException; +import javax.annotation.Nullable; + +public class TaintedRangeBasedTokenizer implements SensitiveHandler.Tokenizer { + + private final String value; + private final Range[] ranges; + + @Nullable private Ranged current; + + private int rangesIndex; + + private int pos; + + public TaintedRangeBasedTokenizer(final Evidence evidence) { + this.ranges = evidence.getRanges() == null ? Ranges.EMPTY : evidence.getRanges(); + this.value = evidence.getValue(); + rangesIndex = 0; + pos = 0; // current value position + } + + @Override + public boolean next() { + current = buildNext(); + return current != null; + } + + @Override + public Ranged current() { + if (current == null) { + throw new NoSuchElementException(); + } + return current; + } + + @Nullable + private Ranged buildNext() { + for (; rangesIndex < ranges.length; rangesIndex++) { + Range range = ranges[rangesIndex]; + if (range.getStart() <= pos) { + pos = range.getStart() + range.getLength(); + } else { + Ranged next = Ranged.build(pos, range.getStart() - pos); + pos = range.getStart() + range.getLength(); + return next; + } + } + if (pos < value.length()) { + Ranged next = Ranged.build(pos, value.length() - pos); + pos = value.length(); + return next; + } + return null; + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/CommandInjectionModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/CommandInjectionModuleImpl.java index ea5fa289e6d..24fd88c99c4 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/CommandInjectionModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/CommandInjectionModuleImpl.java @@ -3,6 +3,7 @@ import static com.datadog.iast.taint.Ranges.rangesProviderFor; import static com.datadog.iast.taint.Tainteds.canBeTainted; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.VulnerabilityType; import com.datadog.iast.taint.TaintedObjects; @@ -15,6 +16,10 @@ public class CommandInjectionModuleImpl extends SinkModuleBase implements CommandInjectionModule { + public CommandInjectionModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onRuntimeExec(@Nullable final String... 
cmdArray) { if (!canBeTainted(cmdArray)) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HstsMissingHeaderModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HstsMissingHeaderModuleImpl.java new file mode 100644 index 00000000000..801d7e51052 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HstsMissingHeaderModuleImpl.java @@ -0,0 +1,86 @@ +package com.datadog.iast.sink; + +import com.datadog.iast.Dependencies; +import com.datadog.iast.IastRequestContext; +import com.datadog.iast.model.Location; +import com.datadog.iast.model.Vulnerability; +import com.datadog.iast.model.VulnerabilityType; +import com.datadog.iast.overhead.Operations; +import datadog.trace.api.gateway.IGSpanInfo; +import datadog.trace.api.iast.sink.HstsMissingHeaderModule; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import java.util.Locale; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class HstsMissingHeaderModuleImpl extends SinkModuleBase implements HstsMissingHeaderModule { + private static final Pattern MAX_AGE = + Pattern.compile("max-age=(\\d+)", Pattern.CASE_INSENSITIVE); + + private static final Logger LOGGER = LoggerFactory.getLogger(HstsMissingHeaderModuleImpl.class); + + public HstsMissingHeaderModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + + @Override + public void onRequestEnd(final Object iastRequestContextObject, final IGSpanInfo igSpanInfo) { + + final IastRequestContext iastRequestContext = (IastRequestContext) iastRequestContextObject; + + if (!isValidMaxAge(iastRequestContext.getStrictTransportSecurity())) { + try { + Map tags = igSpanInfo.getTags(); + String urlString = (String) tags.get("http.url"); + Integer httpStatus = (Integer) tags.get("http.status_code"); + if (isIgnorableResponseCode(httpStatus)) { + return; + } + if (!isHtmlResponse(iastRequestContext.getContentType())) { + return; + } + if (!isHttps(urlString, iastRequestContext.getxForwardedProto())) { + return; + } + final AgentSpan span = AgentTracer.activeSpan(); + if (overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { + reporter.report( + span, + new Vulnerability( + VulnerabilityType.HSTS_HEADER_MISSING, Location.forSpan(span), null)); + } + } catch (Throwable e) { + LOGGER.debug("Exception while checking for missing HSTS headers vulnerability", e); + } + } + } + + static boolean isValidMaxAge(@Nullable final String value) { + if (value == null) { + return false; + } + final Matcher matcher = MAX_AGE.matcher(value); + if (!matcher.find()) { + return false; + } + return Integer.parseInt(matcher.group(1)) > 0; + } + + static boolean isHttps(@Nullable final String urlString, @Nullable final String forwardedFor) { + if (urlString == null) { + return false; + } + if (urlString.toLowerCase(Locale.ROOT).startsWith("https://")) { + return true; + } + if (forwardedFor == null) { + return false; + } + return forwardedFor.toLowerCase(Locale.ROOT).contains("https"); + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HttpResponseHeaderModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HttpResponseHeaderModuleImpl.java index 01f486502ef..9eea7be16c5 100644 --- 
a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HttpResponseHeaderModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/HttpResponseHeaderModuleImpl.java @@ -1,13 +1,18 @@ package com.datadog.iast.sink; +import static com.datadog.iast.util.HttpHeader.Values.SET_COOKIE; import static java.util.Collections.singletonList; +import com.datadog.iast.Dependencies; +import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Evidence; import com.datadog.iast.model.Location; import com.datadog.iast.model.Vulnerability; import com.datadog.iast.model.VulnerabilityType; import com.datadog.iast.overhead.Operations; import com.datadog.iast.util.CookieSecurityParser; +import com.datadog.iast.util.HttpHeader; +import com.datadog.iast.util.HttpHeader.ContextAwareHeader; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.sink.HttpCookieModule; import datadog.trace.api.iast.sink.HttpResponseHeaderModule; @@ -22,15 +27,28 @@ public class HttpResponseHeaderModuleImpl extends SinkModuleBase implements HttpResponseHeaderModule { - private static final String SET_COOKIE_HEADER = "Set-Cookie"; + + public HttpResponseHeaderModuleImpl(final Dependencies dependencies) { + super(dependencies); + } @Override public void onHeader(@Nonnull final String name, final String value) { - if (SET_COOKIE_HEADER.equalsIgnoreCase(name)) { - onCookies(CookieSecurityParser.parse(value)); - } - if (null != InstrumentationBridge.UNVALIDATED_REDIRECT) { - InstrumentationBridge.UNVALIDATED_REDIRECT.onHeader(name, value); + final HttpHeader header = HttpHeader.from(name); + if (header != null) { + if (header instanceof ContextAwareHeader) { + final AgentSpan span = AgentTracer.activeSpan(); + final IastRequestContext ctx = IastRequestContext.get(span); + if (ctx != null) { + ((ContextAwareHeader) header).onHeader(ctx, value); + } + } + if (header == SET_COOKIE) { + onCookies(CookieSecurityParser.parse(value)); + } + if (null != InstrumentationBridge.UNVALIDATED_REDIRECT) { + InstrumentationBridge.UNVALIDATED_REDIRECT.onHeader(name, value); + } } } @@ -48,7 +66,7 @@ private void onCookies(final List cookies) { if (!overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { return; } - final Location location = Location.forSpanAndStack(spanId(span), getCurrentStackTrace()); + final Location location = Location.forSpanAndStack(span, getCurrentStackTrace()); for (final Map.Entry entry : vulnerable.entrySet()) { final Cookie cookie = entry.getValue(); final Evidence evidence = new Evidence(cookie.getCookieName()); diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/LdapInjectionModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/LdapInjectionModuleImpl.java index 1b0fc2a3cd9..aaa0f999bfb 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/LdapInjectionModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/LdapInjectionModuleImpl.java @@ -3,6 +3,7 @@ import static com.datadog.iast.taint.Ranges.rangesProviderFor; import static com.datadog.iast.taint.Tainteds.canBeTainted; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.VulnerabilityType; import com.datadog.iast.taint.TaintedObjects; @@ -14,6 +15,10 @@ public class LdapInjectionModuleImpl extends SinkModuleBase implements LdapInjectionModule { + public LdapInjectionModuleImpl(final Dependencies dependencies) { + 
super(dependencies); + } + @SuppressWarnings("unchecked") @Override public void onDirContextSearch( diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/PathTraversalModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/PathTraversalModuleImpl.java index 2cb6eb4d816..cd5a54862a7 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/PathTraversalModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/PathTraversalModuleImpl.java @@ -4,6 +4,7 @@ import static com.datadog.iast.taint.Tainteds.canBeTainted; import static java.util.Arrays.asList; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.VulnerabilityType; import com.datadog.iast.taint.TaintedObjects; @@ -20,6 +21,10 @@ public class PathTraversalModuleImpl extends SinkModuleBase implements PathTraversalModule { + public PathTraversalModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onPathTraversal(final @Nullable String path) { if (!canBeTainted(path)) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SinkModuleBase.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SinkModuleBase.java index 7691e428f3a..d1a24a5516f 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SinkModuleBase.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SinkModuleBase.java @@ -1,6 +1,9 @@ package com.datadog.iast.sink; -import com.datadog.iast.HasDependencies; +import static com.datadog.iast.util.ObjectVisitor.State.CONTINUE; +import static com.datadog.iast.util.ObjectVisitor.State.EXIT; + +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.Reporter; import com.datadog.iast.model.Evidence; @@ -14,28 +17,25 @@ import com.datadog.iast.taint.Ranges; import com.datadog.iast.taint.Ranges.RangesProvider; import com.datadog.iast.taint.TaintedObject; -import com.datadog.iast.taint.TaintedObjects; +import com.datadog.iast.util.ObjectVisitor; +import com.datadog.iast.util.ObjectVisitor.State; +import com.datadog.iast.util.ObjectVisitor.Visitor; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.instrumentation.iastinstrumenter.IastExclusionTrie; import datadog.trace.util.stacktrace.StackWalker; -import java.util.Collection; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** Base class with utility methods for with sinks */ -public abstract class SinkModuleBase implements HasDependencies { - private static final int MAX_VISITED_OBJECTS = 1000; - private static final int MAX_RECURSIVE_DEPTH = 10; - protected OverheadController overheadController; - protected Reporter reporter; - protected StackWalker stackWalker; +@SuppressWarnings({"UnusedReturnValue", "SameParameterValue"}) +public abstract class SinkModuleBase { + + protected final OverheadController overheadController; + protected final Reporter reporter; + protected final StackWalker stackWalker; - @Override - public void registerDependencies(@Nonnull final Dependencies dependencies) { + protected SinkModuleBase(@Nonnull final Dependencies dependencies) { overheadController = dependencies.getOverheadController(); reporter = dependencies.getReporter(); stackWalker = dependencies.getStackWalker(); @@ -62,6 +62,16 @@ public 
void registerDependencies(@Nonnull final Dependencies dependencies) { return result; } + protected final @Nullable Evidence checkInjectionDeeply( + @Nullable final AgentSpan span, + @Nonnull final IastRequestContext ctx, + @Nonnull final InjectionType type, + @Nonnull final E value) { + final InjectionVisitor visitor = new InjectionVisitor(span, ctx, type); + ObjectVisitor.visit(value, visitor); + return visitor.evidence; + } + protected final @Nullable Evidence checkInjection( @Nullable final AgentSpan span, @Nonnull final InjectionType type, @@ -78,6 +88,9 @@ public void registerDependencies(@Nonnull final Dependencies dependencies) { if (rangeProvider.size() == 1) { // only one item and has ranges final E item = rangeProvider.value(0); + if (item == null) { + return null; // should never happen + } evidence = item.toString(); targetRanges = rangeProvider.ranges(item); } else { @@ -159,93 +172,13 @@ protected final void report( @Nonnull final Evidence evidence) { reporter.report( span, - new Vulnerability( - type, Location.forSpanAndStack(spanId(span), getCurrentStackTrace()), evidence)); - } - - protected Object isDeeplyTainted( - @Nonnull final Object value, @Nonnull final TaintedObjects taintedObjects) { - return isDeeplyTaintedRecursive(value, taintedObjects, new HashSet<>(), MAX_RECURSIVE_DEPTH); - } - - private Object isDeeplyTaintedRecursive( - @Nonnull final Object value, - @Nonnull final TaintedObjects taintedObjects, - Set visitedObjects, - int depth) { - if (null == value) { - return null; - } - if (visitedObjects.size() > MAX_VISITED_OBJECTS) { - return null; - } - if (visitedObjects.contains(value)) { - return null; - } - TaintedObject taintedObject = taintedObjects.get(value); - if (null != taintedObject) { - return value; - } else { - if (depth <= 0) { - return null; - } - visitedObjects.add(value); - if (value instanceof Object[]) { - Object[] array = (Object[]) value; - for (int i = 0; i < array.length; i++) { - Object arrayValue = array[i]; - Object result = - isDeeplyTaintedRecursive(arrayValue, taintedObjects, visitedObjects, depth - 1); - if (null != result) { - return result; - } else { - visitedObjects.add(arrayValue); - } - } - return null; - } else if (value instanceof Map) { - for (Map.Entry entry : ((Map) value).entrySet()) { - Object result = - isDeeplyTaintedRecursive(entry.getKey(), taintedObjects, visitedObjects, depth); - if (null != result) { - return result; - } else { - visitedObjects.add(entry.getKey()); - } - result = - isDeeplyTaintedRecursive(entry.getValue(), taintedObjects, visitedObjects, depth - 1); - if (null != result) { - return result; - } else { - visitedObjects.add(entry.getValue()); - } - } - return null; - } else if (value instanceof Collection) { - for (Object object : (Collection) value) { - Object result = - isDeeplyTaintedRecursive(object, taintedObjects, visitedObjects, depth - 1); - if (null != result) { - return result; - } else { - visitedObjects.add(object); - } - } - return null; - } else { - return null; - } - } + new Vulnerability(type, Location.forSpanAndStack(span, getCurrentStackTrace()), evidence)); } protected StackTraceElement getCurrentStackTrace() { return stackWalker.walk(SinkModuleBase::findValidPackageForVulnerability); } - static long spanId(final AgentSpan span) { - return span == null ? 
0 : span.getSpanId(); - } - static StackTraceElement findValidPackageForVulnerability( @Nonnull final Stream stream) { final StackTraceElement[] first = new StackTraceElement[1]; @@ -260,4 +193,26 @@ static StackTraceElement findValidPackageForVulnerability( .findFirst() .orElse(first[0]); } + + private class InjectionVisitor implements Visitor { + + @Nullable private final AgentSpan span; + private final IastRequestContext ctx; + private final InjectionType type; + @Nullable private Evidence evidence; + + private InjectionVisitor( + @Nullable final AgentSpan span, final IastRequestContext ctx, final InjectionType type) { + this.span = span; + this.ctx = ctx; + this.type = type; + } + + @Nonnull + @Override + public State visit(@Nonnull final String path, @Nonnull final Object value) { + evidence = checkInjection(span, ctx, type, value); + return evidence != null ? EXIT : CONTINUE; // report first tainted value only + } + } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SqlInjectionModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SqlInjectionModuleImpl.java index 31d42394329..98ad3d757f4 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SqlInjectionModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SqlInjectionModuleImpl.java @@ -1,5 +1,6 @@ package com.datadog.iast.sink; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Evidence; import com.datadog.iast.model.VulnerabilityType; @@ -10,6 +11,10 @@ public class SqlInjectionModuleImpl extends SinkModuleBase implements SqlInjectionModule { + public SqlInjectionModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onJdbcQuery(@Nullable final String queryString) { onJdbcQuery(queryString, null); diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SsrfModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SsrfModuleImpl.java index b4df8302c94..e397e649c6e 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SsrfModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/SsrfModuleImpl.java @@ -1,5 +1,6 @@ package com.datadog.iast.sink; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.VulnerabilityType; import datadog.trace.api.iast.sink.SsrfModule; @@ -9,6 +10,10 @@ public class SsrfModuleImpl extends SinkModuleBase implements SsrfModule { + public SsrfModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onURLConnection(@Nullable final Object url) { if (url == null) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/TrustBoundaryViolationModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/TrustBoundaryViolationModuleImpl.java index d1b4fb16eaa..5f3ed0707b2 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/TrustBoundaryViolationModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/TrustBoundaryViolationModuleImpl.java @@ -1,10 +1,8 @@ package com.datadog.iast.sink; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; -import com.datadog.iast.model.Evidence; import com.datadog.iast.model.VulnerabilityType; -import com.datadog.iast.overhead.Operations; -import com.datadog.iast.taint.TaintedObjects; import 
datadog.trace.api.iast.sink.TrustBoundaryViolationModule; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -12,24 +10,24 @@ public class TrustBoundaryViolationModuleImpl extends SinkModuleBase implements TrustBoundaryViolationModule { + + public TrustBoundaryViolationModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onSessionValue(@Nonnull String name, Object value) { final AgentSpan span = AgentTracer.activeSpan(); - final IastRequestContext ctx = IastRequestContext.get(span); - if (ctx == null) { + if (span == null) { return; } - TaintedObjects taintedObjects = ctx.getTaintedObjects(); - if (null != taintedObjects.get(name)) { - if (!overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { - return; - } - report(span, VulnerabilityType.TRUST_BOUNDARY_VIOLATION, new Evidence(name)); + final IastRequestContext ctx = IastRequestContext.get(span); + if (ctx == null) { return; } - Object taintedObject = isDeeplyTainted(value, taintedObjects); - if (null != taintedObject) { - checkInjection(span, ctx, VulnerabilityType.TRUST_BOUNDARY_VIOLATION, taintedObject); + checkInjection(span, ctx, VulnerabilityType.TRUST_BOUNDARY_VIOLATION, name); + if (value != null) { + checkInjectionDeeply(span, ctx, VulnerabilityType.TRUST_BOUNDARY_VIOLATION, value); } } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/UnvalidatedRedirectModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/UnvalidatedRedirectModuleImpl.java index e24fbdb798f..deaf1977898 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/UnvalidatedRedirectModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/UnvalidatedRedirectModuleImpl.java @@ -2,6 +2,7 @@ import static com.datadog.iast.taint.Tainteds.canBeTainted; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Evidence; import com.datadog.iast.model.Location; @@ -25,6 +26,10 @@ public class UnvalidatedRedirectModuleImpl extends SinkModuleBase private static final String LOCATION_HEADER = "Location"; private static final String REFERER = "Referer"; + public UnvalidatedRedirectModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onRedirect(final @Nullable String value) { if (!canBeTainted(value)) { @@ -89,7 +94,7 @@ private void checkUnvalidatedRedirect( span, new Vulnerability( VulnerabilityType.UNVALIDATED_REDIRECT, - Location.forSpanAndClassAndMethod(span.getSpanId(), clazz, method), + Location.forSpanAndClassAndMethod(span, clazz, method), evidence)); } else { report(span, VulnerabilityType.UNVALIDATED_REDIRECT, evidence); @@ -99,7 +104,7 @@ private void checkUnvalidatedRedirect( private boolean isRefererHeader(Range[] ranges) { for (Range range : ranges) { if (range.getSource().getOrigin() != SourceTypes.REQUEST_HEADER_VALUE - || !range.getSource().getName().equalsIgnoreCase(REFERER)) { + || !REFERER.equalsIgnoreCase(range.getSource().getName())) { return false; } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakCipherModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakCipherModuleImpl.java index f06a52c4f02..0c61867614d 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakCipherModuleImpl.java +++ 
b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakCipherModuleImpl.java @@ -1,5 +1,6 @@ package com.datadog.iast.sink; +import com.datadog.iast.Dependencies; import com.datadog.iast.model.Evidence; import com.datadog.iast.model.VulnerabilityType; import com.datadog.iast.overhead.Operations; @@ -14,9 +15,8 @@ public class WeakCipherModuleImpl extends SinkModuleBase implements WeakCipherMo private Config config; - @Override - public void registerDependencies(@Nonnull Dependencies dependencies) { - super.registerDependencies(dependencies); + public WeakCipherModuleImpl(final Dependencies dependencies) { + super(dependencies); config = dependencies.getConfig(); } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakHashModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakHashModuleImpl.java index 3d91d1d371b..88b9cb8b0a4 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakHashModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakHashModuleImpl.java @@ -1,5 +1,6 @@ package com.datadog.iast.sink; +import com.datadog.iast.Dependencies; import com.datadog.iast.model.Evidence; import com.datadog.iast.model.VulnerabilityType; import com.datadog.iast.overhead.Operations; @@ -14,9 +15,8 @@ public class WeakHashModuleImpl extends SinkModuleBase implements WeakHashModule private Config config; - @Override - public void registerDependencies(@Nonnull Dependencies dependencies) { - super.registerDependencies(dependencies); + public WeakHashModuleImpl(final Dependencies dependencies) { + super(dependencies); config = dependencies.getConfig(); } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakRandomnessModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakRandomnessModuleImpl.java index 08c1d278e8f..7c13313eb30 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakRandomnessModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/WeakRandomnessModuleImpl.java @@ -1,7 +1,9 @@ package com.datadog.iast.sink; +import com.datadog.iast.Dependencies; import com.datadog.iast.model.Evidence; import com.datadog.iast.model.VulnerabilityType; +import com.datadog.iast.overhead.Operations; import datadog.trace.api.iast.sink.WeakRandomnessModule; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -10,12 +12,19 @@ public class WeakRandomnessModuleImpl extends SinkModuleBase implements WeakRandomnessModule { + public WeakRandomnessModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onWeakRandom(@Nonnull final Class instance) { if (isSecuredInstance(instance)) { return; } final AgentSpan span = AgentTracer.activeSpan(); + if (!overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { + return; + } report(span, VulnerabilityType.WEAK_RANDOMNESS, new Evidence(instance.getName())); } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XContentTypeModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XContentTypeModuleImpl.java new file mode 100644 index 00000000000..8cb864e443e --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XContentTypeModuleImpl.java @@ -0,0 +1,59 @@ +package com.datadog.iast.sink; + +import com.datadog.iast.Dependencies; +import 
com.datadog.iast.IastRequestContext; +import com.datadog.iast.model.Location; +import com.datadog.iast.model.Vulnerability; +import com.datadog.iast.model.VulnerabilityType; +import com.datadog.iast.overhead.Operations; +import datadog.trace.api.gateway.IGSpanInfo; +import datadog.trace.api.iast.sink.XContentTypeModule; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import java.util.Locale; +import java.util.Map; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class XContentTypeModuleImpl extends SinkModuleBase implements XContentTypeModule { + private static final Logger LOGGER = LoggerFactory.getLogger(XContentTypeModuleImpl.class); + + public XContentTypeModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + + @Override + public void onRequestEnd(final Object iastRequestContextObject, final IGSpanInfo igSpanInfo) { + try { + + final IastRequestContext iastRequestContext = (IastRequestContext) iastRequestContextObject; + + if (!isNoSniffContentOptions(iastRequestContext.getxContentTypeOptions())) { + if (!isHtmlResponse(iastRequestContext.getContentType())) { + return; + } + Map tags = igSpanInfo.getTags(); + if (isIgnorableResponseCode((Integer) tags.get("http.status_code"))) { + return; + } + final AgentSpan span = AgentTracer.activeSpan(); + if (overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { + reporter.report( + span, + new Vulnerability( + VulnerabilityType.XCONTENTTYPE_HEADER_MISSING, Location.forSpan(span), null)); + } + } + } catch (Throwable e) { + LOGGER.debug("Exception while checking for missing X-Content-Type-Options header", e); + } + } + + static boolean isNoSniffContentOptions(@Nullable final String value) { + if (value == null) { + return false; + } + return value.toLowerCase(Locale.ROOT).contains("nosniff"); + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XPathInjectionModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XPathInjectionModuleImpl.java index 7e501b969a6..31633777c5e 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XPathInjectionModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XPathInjectionModuleImpl.java @@ -2,6 +2,7 @@ import static com.datadog.iast.taint.Tainteds.canBeTainted; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.VulnerabilityType; import datadog.trace.api.iast.sink.XPathInjectionModule; @@ -10,6 +11,11 @@ import javax.annotation.Nullable; public class XPathInjectionModuleImpl extends SinkModuleBase implements XPathInjectionModule { + + public XPathInjectionModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onExpression(@Nullable String expression) { if (!canBeTainted(expression)) { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XssModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XssModuleImpl.java index 4c80d271755..da3d1e18337 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XssModuleImpl.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/sink/XssModuleImpl.java @@ -3,8 +3,16 @@ import static com.datadog.iast.taint.Ranges.rangesProviderFor; import static com.datadog.iast.taint.Tainteds.canBeTainted; +import com.datadog.iast.Dependencies; 
import com.datadog.iast.IastRequestContext; +import com.datadog.iast.model.Evidence; +import com.datadog.iast.model.Location; +import com.datadog.iast.model.Range; +import com.datadog.iast.model.Vulnerability; import com.datadog.iast.model.VulnerabilityType; +import com.datadog.iast.overhead.Operations; +import com.datadog.iast.taint.Ranges; +import com.datadog.iast.taint.TaintedObject; import com.datadog.iast.taint.TaintedObjects; import datadog.trace.api.iast.sink.XssModule; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -14,6 +22,10 @@ public class XssModuleImpl extends SinkModuleBase implements XssModule { + public XssModuleImpl(final Dependencies dependencies) { + super(dependencies); + } + @Override public void onXss(@Nonnull String s) { if (!canBeTainted(s)) { @@ -27,6 +39,37 @@ public void onXss(@Nonnull String s) { checkInjection(span, ctx, VulnerabilityType.XSS, s); } + @Override + public void onXss(@Nonnull String s, @Nonnull String clazz, @Nonnull String method) { + if (!canBeTainted(s)) { + return; + } + final AgentSpan span = AgentTracer.activeSpan(); + final IastRequestContext ctx = IastRequestContext.get(span); + if (ctx == null) { + return; + } + TaintedObject taintedObject = ctx.getTaintedObjects().get(s); + if (taintedObject == null) { + return; + } + Range[] notMarkedRanges = + Ranges.getNotMarkedRanges(taintedObject.getRanges(), VulnerabilityType.XSS.mark()); + if (notMarkedRanges == null || notMarkedRanges.length == 0) { + return; + } + if (!overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { + return; + } + final Evidence evidence = new Evidence(s, notMarkedRanges); + reporter.report( + span, + new Vulnerability( + VulnerabilityType.XSS, + Location.forSpanAndClassAndMethod(span, clazz, method), + evidence)); + } + @Override public void onXss(@Nonnull char[] array) { if (array == null || array.length == 0) { @@ -54,4 +97,33 @@ public void onXss(@Nonnull String format, @Nullable Object[] args) { checkInjection( span, VulnerabilityType.XSS, rangesProviderFor(to, format), rangesProviderFor(to, args)); } + + @Override + public void onXss(@Nonnull CharSequence s, @Nullable String file, int line) { + if (!canBeTainted(s) || file == null || file.isEmpty()) { + return; + } + final AgentSpan span = AgentTracer.activeSpan(); + final IastRequestContext ctx = IastRequestContext.get(span); + if (ctx == null) { + return; + } + TaintedObject taintedObject = ctx.getTaintedObjects().get(s); + if (taintedObject == null) { + return; + } + Range[] notMarkedRanges = + Ranges.getNotMarkedRanges(taintedObject.getRanges(), VulnerabilityType.XSS.mark()); + if (notMarkedRanges == null || notMarkedRanges.length == 0) { + return; + } + if (!overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span)) { + return; + } + final Evidence evidence = new Evidence(s.toString(), notMarkedRanges); + reporter.report( + span, + new Vulnerability( + VulnerabilityType.XSS, Location.forSpanAndFileAndLine(span, file, line), evidence)); + } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/source/WebModuleImpl.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/source/WebModuleImpl.java deleted file mode 100644 index 6702478efb1..00000000000 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/source/WebModuleImpl.java +++ /dev/null @@ -1,139 +0,0 @@ -package com.datadog.iast.source; - -import static com.datadog.iast.taint.Tainteds.canBeTainted; - -import com.datadog.iast.IastRequestContext; -import 
com.datadog.iast.model.Source; -import com.datadog.iast.taint.TaintedObjects; -import datadog.trace.api.iast.SourceTypes; -import datadog.trace.api.iast.source.WebModule; -import java.util.Collection; -import java.util.Iterator; -import java.util.Map; -import javax.annotation.Nullable; - -public class WebModuleImpl implements WebModule { - - @Override - public void onParameterNames(@Nullable final Collection paramNames) { - onNamed(paramNames, SourceTypes.REQUEST_PARAMETER_NAME); - } - - @Override - public void onParameterValues( - @Nullable final String paramName, @Nullable final String[] paramValues) { - onNamed(paramName, paramValues, SourceTypes.REQUEST_PARAMETER_VALUE); - } - - @Override - public void onParameterValues( - @Nullable final String paramName, @Nullable final Collection paramValues) { - onNamed(paramName, paramValues, SourceTypes.REQUEST_PARAMETER_VALUE); - } - - @Override - public void onParameterValues(@Nullable final Map values) { - onNamed(values, SourceTypes.REQUEST_PARAMETER_VALUE); - } - - @Override - public void onHeaderNames(@Nullable final Collection headerNames) { - onNamed(headerNames, SourceTypes.REQUEST_HEADER_NAME); - } - - @Override - public void onHeaderValues( - @Nullable final String headerName, @Nullable final Collection headerValues) { - onNamed(headerName, headerValues, SourceTypes.REQUEST_HEADER_VALUE); - } - - @Override - public void onCookieNames(@Nullable Iterable cookieNames) { - onNamed(cookieNames, SourceTypes.REQUEST_COOKIE_NAME); - } - - private static void onNamed(@Nullable final Iterable names, final byte source) { - if (names == null) { - return; - } - Iterator iterator = names.iterator(); - if (!iterator.hasNext()) { - return; - } - - final IastRequestContext ctx = IastRequestContext.get(); - if (ctx == null) { - return; - } - final TaintedObjects taintedObjects = ctx.getTaintedObjects(); - do { - String name = iterator.next(); - if (canBeTainted(name)) { - taintedObjects.taintInputString(name, new Source(source, name, name)); - } - } while (iterator.hasNext()); - } - - private static void onNamed( - @Nullable final String name, @Nullable final Iterable values, final byte source) { - if (values == null) { - return; - } - Iterator iterator = values.iterator(); - if (!iterator.hasNext()) { - return; - } - - final IastRequestContext ctx = IastRequestContext.get(); - if (ctx == null) { - return; - } - final TaintedObjects taintedObjects = ctx.getTaintedObjects(); - do { - String value = iterator.next(); - if (canBeTainted(value)) { - taintedObjects.taintInputString(value, new Source(source, name, value)); - } - } while (iterator.hasNext()); - } - - private static void onNamed( - @Nullable final String name, @Nullable final String[] values, final byte source) { - if (values == null || values.length == 0) { - return; - } - final IastRequestContext ctx = IastRequestContext.get(); - if (ctx == null) { - return; - } - final TaintedObjects taintedObjects = ctx.getTaintedObjects(); - for (final String value : values) { - if (canBeTainted(value)) { - taintedObjects.taintInputString(value, new Source(source, name, value)); - } - } - } - - private static void onNamed(@Nullable final Map values, final byte source) { - if (values == null || values.isEmpty()) { - return; - } - final IastRequestContext ctx = IastRequestContext.get(); - if (ctx == null) { - return; - } - final TaintedObjects taintedObjects = ctx.getTaintedObjects(); - final byte nameSource = SourceTypes.namedSource(source); - for (final Map.Entry entry : values.entrySet()) { - final String 
name = entry.getKey(); - if (canBeTainted(name)) { - taintedObjects.taintInputString(name, new Source(nameSource, name, name)); - } - for (final String value : entry.getValue()) { - if (canBeTainted(value)) { - taintedObjects.taintInputString(value, new Source(source, name, value)); - } - } - } - } -} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Ranges.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Ranges.java index e04da6227b0..992655086c5 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Ranges.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Ranges.java @@ -1,9 +1,11 @@ package com.datadog.iast.taint; -import static com.datadog.iast.model.Range.NOT_MARKED; +import static com.datadog.iast.taint.TaintedObject.MAX_RANGE_COUNT; +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED; import com.datadog.iast.model.Range; import com.datadog.iast.model.Source; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -39,21 +41,43 @@ public Range[] ranges(final Object value) { private Ranges() {} - public static Range[] forString( - final @Nonnull String obj, final @Nonnull Source source, final int mark) { + public static Range[] forCharSequence( + final @Nonnull CharSequence obj, final @Nonnull Source source) { + return forCharSequence(obj, source, NOT_MARKED); + } + + public static Range[] forCharSequence( + final @Nonnull CharSequence obj, final @Nonnull Source source, final int mark) { return new Range[] {new Range(0, obj.length(), source, mark)}; } + public static Range[] forObject(final @Nonnull Source source) { + return forObject(source, NOT_MARKED); + } + public static Range[] forObject(final @Nonnull Source source, final int mark) { return new Range[] {new Range(0, Integer.MAX_VALUE, source, mark)}; } public static void copyShift( final @Nonnull Range[] src, final @Nonnull Range[] dst, final int dstPos, final int shift) { + copyShift(src, dst, dstPos, shift, src.length); + } + + public static void copyShift( + final @Nonnull Range[] src, + final @Nonnull Range[] dst, + final int dstPos, + final int shift, + final int max) { + final int srcLength = Math.min(max, src.length); + if (srcLength <= 0) { + return; + } if (shift == 0) { - System.arraycopy(src, 0, dst, dstPos, src.length); + System.arraycopy(src, 0, dst, dstPos, srcLength); } else { - for (int iSrc = 0, iDst = dstPos; iSrc < src.length; iSrc++, iDst++) { + for (int iSrc = 0, iDst = dstPos; iSrc < srcLength; iSrc++, iDst++) { dst[iDst] = src[iSrc].shift(shift); } } @@ -61,13 +85,16 @@ public static void copyShift( public static Range[] mergeRanges( final int offset, @Nonnull final Range[] rangesLeft, @Nonnull final Range[] rangesRight) { - final int nRanges = rangesLeft.length + rangesRight.length; - final Range[] ranges = new Range[nRanges]; + final long nRanges = rangesLeft.length + (long) rangesRight.length; + final Range[] ranges = newArray(nRanges); + int remaining = ranges.length; if (rangesLeft.length > 0) { - System.arraycopy(rangesLeft, 0, ranges, 0, rangesLeft.length); + final int count = Math.min(rangesLeft.length, remaining); + System.arraycopy(rangesLeft, 0, ranges, 0, count); + remaining -= count; } - if (rangesRight.length > 0) { - Ranges.copyShift(rangesRight, ranges, rangesLeft.length, offset); + if (rangesRight.length > 0 && remaining > 0) { + Ranges.copyShift(rangesRight, ranges, rangesLeft.length, offset, remaining); } return ranges; } @@ -96,6 +123,7 
@@ public static RangesProvider rangesProviderFor( return new ListProvider<>(items, to); } + @Nullable public static Range[] forSubstring(int offset, int length, final @Nonnull Range[] ranges) { int[] includedRangesInterval = getIncludedRangesInterval(offset, length, ranges); @@ -157,6 +185,7 @@ public static int[] getIncludedRangesInterval( return new int[] {start, end}; } + @Nonnull public static Range highestPriorityRange(@Nonnull final Range[] ranges) { /* * This approach is better but not completely correct ideally the highest priority should use the following patterns: @@ -177,19 +206,25 @@ public static Range highestPriorityRange(@Nonnull final Range[] ranges) { return ranges[0]; } + public static Range[] newArray(final long size) { + return new Range[size > MAX_RANGE_COUNT ? MAX_RANGE_COUNT : (int) size]; + } + public interface RangesProvider { int rangeCount(); int size(); + @Nullable E value(final int index); + @Nullable Range[] ranges(final E value); } private abstract static class IterableProvider implements RangesProvider { private final LIST items; - private final Map ranges; + @Nullable private final Map ranges; private final int rangeCount; private IterableProvider(@Nonnull final LIST items, @Nonnull final TaintedObjects to) { @@ -220,11 +255,13 @@ public int rangeCount() { return rangeCount; } + @Nullable @Override public E value(final int index) { return item(items, index); } + @Nullable @Override public Range[] ranges(final E value) { return ranges == null ? null : ranges.get(value); @@ -237,12 +274,13 @@ public int size() { protected abstract int size(@Nonnull final LIST items); + @Nullable protected abstract E item(@Nonnull final LIST items, final int index); } private static class SingleProvider implements RangesProvider { private final E value; - private final TaintedObject tainted; + @Nullable private final TaintedObject tainted; private SingleProvider(@Nonnull final E value, @Nonnull final TaintedObjects to) { this.value = value; @@ -259,11 +297,13 @@ public int size() { return 1; } + @Nullable @Override public E value(int index) { return index == 0 ? value : null; } + @Nullable @Override public Range[] ranges(E value) { return value == this.value && tainted != null ? 
tainted.getRanges() : null; @@ -281,6 +321,7 @@ protected int size(@Nonnull final E[] items) { return items.length; } + @Nullable @Override protected E item(@Nonnull final E[] items, final int index) { return items[index]; @@ -298,45 +339,15 @@ protected int size(@Nonnull final List items) { return items.size(); } + @Nullable @Override protected E item(@Nonnull final List items, final int index) { return items.get(index); } } - public static Range createIfDifferent(Range range, int start, int length) { - if (start != range.getStart() || length != range.getLength()) { - return new Range(start, length, range.getSource(), range.getMarks()); - } else { - return range; - } - } - - static int calculateSubstringSkippedRanges(int offset, int length, @Nonnull Range[] ranges) { - // calculate how many skipped ranges are there - int skippedRanges = 0; - for (int rangeIndex = 0; rangeIndex < ranges.length; rangeIndex++) { - final Range rangeSelf = ranges[rangeIndex]; - if (rangeSelf.getStart() + rangeSelf.getLength() <= offset) { - skippedRanges++; - } else { - break; - } - } - - for (int rangeIndex = ranges.length - 1; rangeIndex >= 0; rangeIndex--) { - final Range rangeSelf = ranges[rangeIndex]; - if (rangeSelf.getStart() - offset >= length) { - skippedRanges++; - } else { - break; - } - } - - return skippedRanges; - } - - public static Range[] getNotMarkedRanges(final Range[] ranges, final int mark) { + @Nullable + public static Range[] getNotMarkedRanges(@Nullable final Range[] ranges, final int mark) { if (ranges == null) { return null; } @@ -366,4 +377,37 @@ public static Range[] getNotMarkedRanges(final Range[] ranges, final int mark) { public static Range copyWithPosition(final Range range, final int offset, final int length) { return new Range(offset, length, range.getSource(), range.getMarks()); } + + public static class RangeList { + private final ArrayList delegate = new ArrayList<>(); + private int remaining; + + public RangeList() { + this(MAX_RANGE_COUNT); + } + + public RangeList(final int maxSize) { + this.remaining = maxSize; + } + + public boolean add(final Range item) { + if (remaining > 0 && delegate.add(item)) { + remaining--; + return true; + } + return false; + } + + public boolean isEmpty() { + return delegate.isEmpty(); + } + + public boolean isFull() { + return remaining == 0; + } + + public Range[] toArray() { + return delegate.toArray(new Range[0]); + } + } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedMap.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedMap.java index fa690a22ded..79f91c18a6a 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedMap.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedMap.java @@ -223,7 +223,7 @@ private int remove(final TaintedObject entry) { if (cur == null) { return 0; } - for (TaintedObject prev = cur.next; cur != null; prev = cur, cur = cur.next) { + for (TaintedObject prev = cur.next; cur != null && prev != null; prev = cur, cur = cur.next) { if (cur == entry) { prev.next = cur.next; return 1; @@ -259,7 +259,7 @@ private int index(int h) { private Iterator iterator(final int start, final int stop) { return new Iterator() { int currentIndex = start; - TaintedObject currentSubPos; + @Nullable TaintedObject currentSubPos; @Override public boolean hasNext() { @@ -294,6 +294,8 @@ public TaintedObject next() { }; } + @Nonnull + @Override public Iterator iterator() { return iterator(0, table.length); } diff --git 
a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObject.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObject.java index 3e5ebd3a209..84d3b9c6091 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObject.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObject.java @@ -3,14 +3,18 @@ import static com.datadog.iast.taint.TaintedMap.POSITIVE_MASK; import com.datadog.iast.model.Range; +import datadog.trace.api.Config; import java.lang.ref.ReferenceQueue; import java.lang.ref.WeakReference; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class TaintedObject extends WeakReference { + + public static final int MAX_RANGE_COUNT = Config.get().getIastMaxRangeCount(); + final int positiveHashCode; - TaintedObject next; + @Nullable TaintedObject next; private Range[] ranges; public TaintedObject( @@ -19,7 +23,13 @@ public TaintedObject( final @Nullable ReferenceQueue queue) { super(obj, queue); this.positiveHashCode = System.identityHashCode(obj) & POSITIVE_MASK; - this.ranges = ranges; + // ensure ranges never go over the limit + if (ranges.length > MAX_RANGE_COUNT) { + this.ranges = new Range[MAX_RANGE_COUNT]; + System.arraycopy(ranges, 0, this.ranges, 0, MAX_RANGE_COUNT); + } else { + this.ranges = ranges; + } } /** diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObjects.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObjects.java index 890b9cfd6d8..ab3b20e0401 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObjects.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/TaintedObjects.java @@ -1,13 +1,10 @@ package com.datadog.iast.taint; -import static com.datadog.iast.model.Range.NOT_MARKED; import static datadog.trace.api.ConfigDefaults.DEFAULT_IAST_MAX_CONCURRENT_REQUESTS; import static java.util.Collections.emptyIterator; -import com.datadog.iast.IastRequestContext; import com.datadog.iast.IastSystem; import com.datadog.iast.model.Range; -import com.datadog.iast.model.Source; import com.datadog.iast.model.json.TaintedObjectEncoding; import datadog.trace.api.Config; import java.util.ArrayList; @@ -16,17 +13,17 @@ import java.util.UUID; import java.util.concurrent.ArrayBlockingQueue; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@SuppressWarnings("UnusedReturnValue") public interface TaintedObjects extends Iterable { - TaintedObject taintInputString(@Nonnull String obj, @Nonnull Source source, int mark); - - TaintedObject taintInputObject(@Nonnull Object obj, @Nonnull Source source, int mark); - + @Nullable TaintedObject taint(@Nonnull Object obj, @Nonnull Range[] ranges); + @Nullable TaintedObject get(@Nonnull Object obj); void release(); @@ -37,14 +34,6 @@ public interface TaintedObjects extends Iterable { boolean isFlat(); - default TaintedObject taintInputString(@Nonnull String obj, @Nonnull Source source) { - return taintInputString(obj, source, NOT_MARKED); - } - - default TaintedObject taintInputObject(@Nonnull Object obj, @Nonnull Source source) { - return taintInputObject(obj, source, NOT_MARKED); - } - static TaintedObjects acquire() { TaintedObjectsImpl taintedObjects = TaintedObjectsImpl.pool.poll(); if (taintedObjects == null) { @@ -53,22 +42,6 @@ static TaintedObjects acquire() { return IastSystem.DEBUG ? 
new TaintedObjectsDebugAdapter(taintedObjects) : taintedObjects; } - static TaintedObjects activeTaintedObjects(boolean lazy) { - if (lazy) { - return new LazyTaintedObjects(); - } else { - final IastRequestContext ctx = IastRequestContext.get(); - if (ctx != null) { - return ctx.getTaintedObjects(); - } - return null; - } - } - - static TaintedObjects activeTaintedObjects() { - return activeTaintedObjects(false); - } - class TaintedObjectsImpl implements TaintedObjects { private static final ArrayBlockingQueue pool = @@ -86,24 +59,6 @@ private TaintedObjectsImpl(final @Nonnull TaintedMap map) { this.map = map; } - @Override - public TaintedObject taintInputString( - final @Nonnull String obj, final @Nonnull Source source, final int mark) { - final TaintedObject tainted = - new TaintedObject(obj, Ranges.forString(obj, source, mark), map.getReferenceQueue()); - map.put(tainted); - return tainted; - } - - @Override - public TaintedObject taintInputObject( - @Nonnull Object obj, @Nonnull Source source, final int mark) { - final TaintedObject tainted = - new TaintedObject(obj, Ranges.forObject(source, mark), map.getReferenceQueue()); - map.put(tainted); - return tainted; - } - @Override public TaintedObject taint(final @Nonnull Object obj, final @Nonnull Range[] ranges) { final TaintedObject tainted = new TaintedObject(obj, ranges, map.getReferenceQueue()); @@ -111,6 +66,7 @@ public TaintedObject taint(final @Nonnull Object obj, final @Nonnull Range[] ran return tainted; } + @Nullable @Override public TaintedObject get(final @Nonnull Object obj) { return map.get(obj); @@ -137,13 +93,14 @@ public boolean isFlat() { return map.isFlat(); } + @Nonnull @Override public Iterator iterator() { return map.iterator(); } } - class TaintedObjectsDebugAdapter implements TaintedObjects { + final class TaintedObjectsDebugAdapter implements TaintedObjects { static final Logger LOGGER = LoggerFactory.getLogger(TaintedObjects.class); private final TaintedObjectsImpl delegated; @@ -155,22 +112,7 @@ public TaintedObjectsDebugAdapter(final TaintedObjectsImpl delegated) { LOGGER.debug("new: id={}", id); } - @Override - public TaintedObject taintInputString( - final @Nonnull String obj, final @Nonnull Source source, final int mark) { - final TaintedObject tainted = delegated.taintInputString(obj, source, mark); - logTainted(tainted); - return tainted; - } - - @Override - public TaintedObject taintInputObject( - @Nonnull Object obj, @Nonnull Source source, final int mark) { - final TaintedObject tainted = delegated.taintInputObject(obj, source, mark); - logTainted(tainted); - return tainted; - } - + @Nullable @Override public TaintedObject taint(final @Nonnull Object obj, final @Nonnull Range[] ranges) { final TaintedObject tainted = delegated.taint(obj, ranges); @@ -178,6 +120,7 @@ public TaintedObject taint(final @Nonnull Object obj, final @Nonnull Range[] ran return tainted; } + @Nullable @Override public TaintedObject get(final @Nonnull Object obj) { return delegated.get(obj); @@ -214,6 +157,7 @@ public boolean isFlat() { return delegated.isFlat(); } + @Nonnull @Override public Iterator iterator() { return delegated.iterator(); @@ -230,74 +174,44 @@ private void logTainted(final TaintedObject tainted) { } } - class LazyTaintedObjects implements TaintedObjects { - private boolean fetched = false; - private TaintedObjects taintedObjects; - - @Override - public TaintedObject taintInputString( - @Nonnull final String obj, @Nonnull final Source source, final int mark) { - final TaintedObjects to = getTaintedObjects(); 
- return to == null ? null : to.taintInputString(obj, source, mark); - } + final class NoOp implements TaintedObjects { - @Override - public TaintedObject taintInputObject( - @Nonnull final Object obj, @Nonnull final Source source, final int mark) { - final TaintedObjects to = getTaintedObjects(); - return to == null ? null : to.taintInputObject(obj, source, mark); - } + public static final TaintedObjects INSTANCE = new NoOp(); + @Nullable @Override public TaintedObject taint(@Nonnull final Object obj, @Nonnull final Range[] ranges) { - final TaintedObjects to = getTaintedObjects(); - return to == null ? null : to.taint(obj, ranges); + return null; } + @Nullable @Override public TaintedObject get(@Nonnull final Object obj) { - final TaintedObjects to = getTaintedObjects(); - return to == null ? null : to.get(obj); + return null; } @Override - public void release() { - final TaintedObjects to = getTaintedObjects(); - if (to != null) { - to.release(); - } - } + public void release() {} @Override - public Iterator iterator() { - final TaintedObjects to = getTaintedObjects(); - return to != null ? to.iterator() : emptyIterator(); + public boolean isFlat() { + return false; } @Override - public int getEstimatedSize() { - final TaintedObjects to = getTaintedObjects(); - return to != null ? to.getEstimatedSize() : 0; + public int count() { + return 0; } @Override - public boolean isFlat() { - final TaintedObjects to = getTaintedObjects(); - return to != null && to.isFlat(); + public int getEstimatedSize() { + return 0; } @Override - public int count() { - final TaintedObjects to = getTaintedObjects(); - return to != null ? to.count() : 0; - } - - private TaintedObjects getTaintedObjects() { - if (!fetched) { - fetched = true; - taintedObjects = activeTaintedObjects(); - } - return taintedObjects; + @Nonnull + public Iterator iterator() { + return emptyIterator(); } } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Tainteds.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Tainteds.java index 142249ec1fe..422f677d553 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Tainteds.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/taint/Tainteds.java @@ -1,17 +1,21 @@ package com.datadog.iast.taint; import java.util.Collection; +import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.jetbrains.annotations.Contract; /** Utilitiles to work with {@link TaintedObject} */ public final class Tainteds { private Tainteds() {} + @Contract("null -> false") public static boolean canBeTainted(@Nullable final CharSequence s) { return s != null && s.length() > 0; } + @Contract("null -> false") public static boolean canBeTainted(@Nullable final E[] e) { if (e == null || e.length == 0) { return false; @@ -24,6 +28,7 @@ public static boolean canBeTainted(@Nullable final E[] return false; } + @Contract("null -> false") public static boolean canBeTainted(@Nullable final Collection e) { if (e == null || e.isEmpty()) { return false; @@ -36,7 +41,9 @@ public static boolean canBeTainted(@Nullable final Coll return false; } - public static TaintedObject getTainted(final TaintedObjects to, final Object value) { + @Nullable + public static TaintedObject getTainted( + @Nonnull final TaintedObjects to, @Nullable final Object value) { return value == null ? 
null : to.get(value); } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/TelemetryRequestStartedHandler.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/TelemetryRequestStartedHandler.java index 2af4532a727..5e647355b09 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/TelemetryRequestStartedHandler.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/TelemetryRequestStartedHandler.java @@ -1,6 +1,6 @@ package com.datadog.iast.telemetry; -import com.datadog.iast.HasDependencies.Dependencies; +import com.datadog.iast.Dependencies; import com.datadog.iast.IastRequestContext; import com.datadog.iast.RequestStartedHandler; import com.datadog.iast.taint.TaintedObjects; @@ -23,6 +23,10 @@ protected IastRequestContext newContext() { final TaintedObjects taintedObjects = TaintedObjectsWithTelemetry.build(verbosity, TaintedObjects.acquire()); final IastMetricCollector collector = new IastMetricCollector(); - return new IastRequestContext(taintedObjects, collector); + final IastRequestContext ctx = new IastRequestContext(taintedObjects, collector); + if (taintedObjects instanceof TaintedObjectsWithTelemetry) { + ((TaintedObjectsWithTelemetry) taintedObjects).initContext(ctx); + } + return ctx; } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetry.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetry.java index 9bc900d3ea2..1ac059b0b54 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetry.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetry.java @@ -4,17 +4,15 @@ import static datadog.trace.api.iast.telemetry.IastMetric.REQUEST_TAINTED; import static datadog.trace.api.iast.telemetry.IastMetric.TAINTED_FLAT_MODE; +import com.datadog.iast.IastRequestContext; import com.datadog.iast.model.Range; -import com.datadog.iast.model.Source; import com.datadog.iast.taint.TaintedObject; import com.datadog.iast.taint.TaintedObjects; -import datadog.trace.api.gateway.RequestContext; import datadog.trace.api.iast.telemetry.IastMetricCollector; import datadog.trace.api.iast.telemetry.Verbosity; -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.Iterator; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class TaintedObjectsWithTelemetry implements TaintedObjects { @@ -33,42 +31,32 @@ public static TaintedObjects build( private final TaintedObjects delegate; private final boolean debug; - private volatile RequestContext ctx; + @Nullable private IastRequestContext ctx; protected TaintedObjectsWithTelemetry(final boolean debug, final TaintedObjects delegate) { this.delegate = delegate; this.debug = debug; } - @Override - public TaintedObject taintInputString( - @Nonnull String obj, @Nonnull Source source, final int mark) { - final TaintedObject result = delegate.taintInputString(obj, source, mark); - if (debug) { - IastMetricCollector.add(EXECUTED_TAINTED, 1, getRequestContext()); - } - return result; + /** + * {@link IastRequestContext} depends on {@link TaintedObjects} so it cannot be initialized via + * ctor + */ + public void initContext(final IastRequestContext ctx) { + this.ctx = ctx; } + @Nullable @Override public TaintedObject taint(@Nonnull Object obj, 
@Nonnull Range[] ranges) { final TaintedObject result = delegate.taint(obj, ranges); if (debug) { - IastMetricCollector.add(EXECUTED_TAINTED, 1, getRequestContext()); - } - return result; - } - - @Override - public TaintedObject taintInputObject( - @Nonnull Object obj, @Nonnull Source source, final int mark) { - final TaintedObject result = delegate.taintInputObject(obj, source, mark); - if (debug) { - IastMetricCollector.add(EXECUTED_TAINTED, 1, getRequestContext()); + IastMetricCollector.add(EXECUTED_TAINTED, 1, ctx); } return result; } + @Nullable @Override public TaintedObject get(@Nonnull Object obj) { return delegate.get(obj); @@ -77,16 +65,16 @@ public TaintedObject get(@Nonnull Object obj) { @Override public void release() { try { - final RequestContext reqCtx = getRequestContext(); if (delegate.isFlat()) { - IastMetricCollector.add(TAINTED_FLAT_MODE, 1, reqCtx); + IastMetricCollector.add(TAINTED_FLAT_MODE, 1, ctx); } - IastMetricCollector.add(REQUEST_TAINTED, computeSize(), reqCtx); + IastMetricCollector.add(REQUEST_TAINTED, computeSize(), ctx); } finally { delegate.release(); } } + @Nonnull @Override public Iterator iterator() { return delegate.iterator(); @@ -111,16 +99,4 @@ private int computeSize() { int size = getEstimatedSize(); return size > COUNT_THRESHOLD ? size : count(); } - - /** - * A {@link TaintedObjects} data structure is always linked to a {@link RequestContext} so it's - * actually OK to cache the result. - */ - protected RequestContext getRequestContext() { - if (ctx == null) { - final AgentSpan span = AgentTracer.activeSpan(); - ctx = span == null ? null : span.getRequestContext(); - } - return ctx; - } } diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/CookieSecurityParser.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/CookieSecurityParser.java index 54dec1ec58a..bc238eac288 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/CookieSecurityParser.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/CookieSecurityParser.java @@ -5,8 +5,10 @@ import datadog.trace.api.iast.util.Cookie; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.NoSuchElementException; import java.util.StringTokenizer; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,6 +46,7 @@ public static List parse(final String cookieString) { } } + @Nullable private static Cookie parseInternal(final String header) { String cookieName; boolean httpOnly = false; @@ -111,7 +114,7 @@ private static List splitMultiCookies(final String header) { } private static int guessCookieVersion(String header) { - header = header.toLowerCase(); + header = header.toLowerCase(Locale.ROOT); if (header.contains("expires=")) { // only netscape cookie using 'expires' return 0; diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/HttpHeader.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/HttpHeader.java new file mode 100644 index 00000000000..5d58ea8a617 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/HttpHeader.java @@ -0,0 +1,88 @@ +package com.datadog.iast.util; + +import com.datadog.iast.IastRequestContext; +import java.util.Locale; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; + +public class HttpHeader { + + final String name; + + HttpHeader(final 
String name) { + this.name = name.toLowerCase(Locale.ROOT); + } + + public boolean matches(final String name) { + return this.name.equalsIgnoreCase(name); + } + + @Nullable + public static HttpHeader from(final String name) { + return Values.HEADERS.get(name.toLowerCase(Locale.ROOT)); + } + + public abstract static class ContextAwareHeader extends HttpHeader { + + ContextAwareHeader(final String name) { + super(name); + } + + public abstract void onHeader(final IastRequestContext ctx, final String value); + } + + public static final class Values { + + public static final HttpHeader X_FORWARDED_PROTO = + new ContextAwareHeader("X-Forwarded-Proto") { + + @Override + public void onHeader(final IastRequestContext ctx, final String value) { + ctx.setxForwardedProto(value); + } + }; + public static final HttpHeader SET_COOKIE = new HttpHeader("Set-Cookie"); + public static final HttpHeader STRICT_TRANSPORT_SECURITY = + new ContextAwareHeader("Strict-Transport-Security") { + @Override + public void onHeader(final IastRequestContext ctx, final String value) { + ctx.setStrictTransportSecurity(value); + } + }; + public static final HttpHeader CONTENT_TYPE = + new ContextAwareHeader("Content-Type") { + @Override + public void onHeader(final IastRequestContext ctx, final String value) { + ctx.setContentType(value); + } + }; + public static final HttpHeader X_CONTENT_TYPE_OPTIONS = + new ContextAwareHeader("X-Content-Type-Options") { + @Override + public void onHeader(final IastRequestContext ctx, final String value) { + ctx.setxContentTypeOptions(value); + } + }; + public static final HttpHeader LOCATION = new HttpHeader("Location"); + public static final HttpHeader REFERER = new HttpHeader("Referer"); + + /** Faster lookup for headers */ + static final Map HEADERS; + + static { + HEADERS = + Stream.of( + Values.X_FORWARDED_PROTO, + Values.SET_COOKIE, + Values.STRICT_TRANSPORT_SECURITY, + Values.CONTENT_TYPE, + Values.X_CONTENT_TYPE_OPTIONS, + Values.LOCATION, + Values.REFERER) + .collect(Collectors.toMap(header -> header.name, Function.identity())); + } + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/ObjectVisitor.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/ObjectVisitor.java new file mode 100644 index 00000000000..01f5e568061 --- /dev/null +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/ObjectVisitor.java @@ -0,0 +1,249 @@ +package com.datadog.iast.util; + +import static com.datadog.iast.util.ObjectVisitor.State.CONTINUE; +import static com.datadog.iast.util.ObjectVisitor.State.EXIT; + +import datadog.trace.api.Platform; +import datadog.trace.instrumentation.iastinstrumenter.IastExclusionTrie; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.Collections; +import java.util.IdentityHashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@SuppressWarnings("JavaReflectionMemberAccess") +public class ObjectVisitor { + + private static final Logger LOGGER = LoggerFactory.getLogger(ObjectVisitor.class); + + private static final int MAX_VISITED_OBJECTS = 1000; + private static final int MAX_DEPTH = 10; + @Nullable private static final Method TRY_SET_ACCESSIBLE; + + static { + TRY_SET_ACCESSIBLE = fetchTrySetAccessibleMethod(); + } + + public 
static void visit(@Nonnull final Object object, @Nonnull final Visitor visitor) { + visit(object, visitor, ObjectVisitor::inspectClass); + } + + public static void visit( + @Nonnull final Object object, + @Nonnull final Visitor visitor, + @Nonnull final Predicate> classFilter) { + visit(object, visitor, classFilter, MAX_DEPTH, MAX_VISITED_OBJECTS); + } + + public static void visit( + @Nonnull final Object object, + @Nonnull final Visitor visitor, + final int maxDepth, + final int maxObjects) { + visit(object, visitor, ObjectVisitor::inspectClass, maxDepth, maxObjects); + } + + public static void visit( + @Nonnull final Object object, + @Nonnull final Visitor visitor, + @Nonnull final Predicate> classFilter, + final int maxDepth, + final int maxObjects) { + new ObjectVisitor(classFilter, maxDepth, maxObjects, visitor).visit(0, "root", object); + } + + private int remaining; + private final int maxDepth; + private final Set visited; + private final Visitor visitor; + private final Predicate> classFilter; + + private ObjectVisitor( + final Predicate> classFilter, + final int maxDepth, + final int maxObjects, + final Visitor visitor) { + this.maxDepth = maxDepth; + this.remaining = maxObjects; + this.visited = Collections.newSetFromMap(new IdentityHashMap<>()); + this.visitor = visitor; + this.classFilter = classFilter; + } + + private State visit(final int depth, final String path, final Object value) { + if (remaining <= 0) { + return EXIT; + } + remaining--; + if (depth > maxDepth) { + return CONTINUE; + } + if (!visited.add(value)) { + return CONTINUE; + } + State state = CONTINUE; + try { + if (value instanceof Object[]) { + state = visitArray(depth, path, (Object[]) value); + } else if (value instanceof Map) { + state = visitMap(depth, path, (Map) value); + } else if (value instanceof Iterable) { + state = visitIterable(depth, path, (Iterable) value); + } else { + state = visitObject(depth, path, value); + } + } catch (final Throwable e) { + LOGGER.debug("Failed to visit object of type {}", value.getClass(), e); + } + return state; + } + + private State visitArray(final int depth, final String path, final Object[] array) { + final int arrayDepth = depth + 1; + for (int i = 0; i < array.length; i++) { + final Object item = array[i]; + if (item != null) { + final String itemPath = path + "[" + i + "]"; + final State state = visit(arrayDepth, itemPath, item); + if (state != CONTINUE) { + return state; + } + } + } + return CONTINUE; + } + + private State visitMap(final int depth, final String path, final Map map) { + final int mapDepth = depth + 1; + for (final Map.Entry entry : map.entrySet()) { + final Object key = entry.getKey(); + if (key != null) { + final String keyPath = path + "[]"; + final ObjectVisitor.State state = visit(mapDepth, keyPath, key); + if (state != CONTINUE) { + return state; + } + } + final Object item = entry.getValue(); + if (item != null) { + final String itemPath = path + "[" + key + "]"; + final State state = visit(mapDepth, itemPath, item); + if (state != CONTINUE) { + return state; + } + } + } + return CONTINUE; + } + + private State visitIterable(final int depth, final String path, final Iterable iterable) { + final int iterableDepth = depth + 1; + int index = 0; + for (final Object item : iterable) { + if (item != null) { + final String itemPath = path + "[" + (index++) + "]"; + final State state = visit(iterableDepth, itemPath, item); + if (state != CONTINUE) { + return state; + } + } + } + return CONTINUE; + } + + private State visitObject(final int depth, 
final String path, final Object value) { + final int childDepth = depth + 1; + State state = visitor.visit(path, value); + if (state != State.CONTINUE || !classFilter.test(value.getClass())) { + return state; + } + Class klass = value.getClass(); + while (klass != Object.class) { + for (final Field field : klass.getDeclaredFields()) { + try { + if (inspectField(field) && trySetAccessible(field)) { + final Object fieldValue = field.get(value); + if (fieldValue != null) { + final String fieldPath = path + "." + field.getName(); + state = visit(childDepth, fieldPath, fieldValue); + if (state != CONTINUE) { + return state; + } + } + } + } catch (final Throwable e) { + LOGGER.debug("Unable to get field {}", field, e); + } + } + klass = klass.getSuperclass(); + } + return ObjectVisitor.State.CONTINUE; + } + + public static boolean inspectClass(final Class cls) { + if (cls.isPrimitive()) { + return false; // skip primitives + } + return IastExclusionTrie.apply(cls.getName()) < 1; + } + + private static boolean inspectField(final Field field) { + final int modifiers = field.getModifiers(); + if (Modifier.isStatic(modifiers)) { + return false; + } + final String fieldName = field.getName(); + if ("this$0".equals(fieldName)) { + return false; // skip back references from inner class + } + final Class fieldType = field.getType(); + if ("groovy.lang.MetaClass".equals(fieldType.getName())) { + return false; // skip the whole groovy MOP + } + return true; + } + + @Nullable + private static Method fetchTrySetAccessibleMethod() { + Method method = null; + if (Platform.isJavaVersionAtLeast(9)) { + try { + method = Field.class.getMethod("trySetAccessible"); + } catch (NoSuchMethodException e) { + LOGGER.warn("Can't get method 'Field.trySetAccessible'", e); + } + } + return method; + } + + private static boolean trySetAccessible(final Field field) { + try { + if (TRY_SET_ACCESSIBLE != null) { + return (boolean) TRY_SET_ACCESSIBLE.invoke(field); + } + field.setAccessible(true); + return true; + } catch (RuntimeException | IllegalAccessException | InvocationTargetException e) { + LOGGER.debug("Unable to make field accessible", e); + return false; + } + } + + public interface Visitor { + @Nonnull + State visit(@Nonnull String path, @Nonnull Object value); + } + + public enum State { + CONTINUE, + EXIT + } +} diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/Ranged.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/Ranged.java index ed4ef8135f0..645fb58cfdc 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/Ranged.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/Ranged.java @@ -5,6 +5,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public interface Ranged { @@ -62,6 +63,7 @@ default List remove(final Ranged range) { } /** Computes the intersection of the ranges or {@code null} if they do not intersect */ + @Nullable default Ranged intersection(final Ranged range) { if (this.getStart() == range.getStart() && this.getLength() == range.getLength()) { return this; @@ -84,15 +86,19 @@ default Ranged intersection(final Ranged range) { } } - default boolean isBefore(final Ranged range) { + default boolean isBefore(@Nullable final Ranged range) { if (range == null) { return true; } - return getStart() <= range.getStart(); + final int offset = getStart() - range.getStart(); + if (offset == 0) { + return getLength() <= range.getLength(); // put smaller ranges first + } + return offset < 0; } - 
static Ranged build(int start, int end) { - return new RangedImpl(start, end); + static Ranged build(int start, int length) { + return new RangedImpl(start, length); } class RangedImpl implements Ranged { diff --git a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/RangedDeque.java b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/RangedDeque.java index faef93afde0..4e66a2bf5a1 100644 --- a/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/RangedDeque.java +++ b/dd-java-agent/agent-iast/src/main/java/com/datadog/iast/util/RangedDeque.java @@ -9,8 +9,10 @@ /** */ public interface RangedDeque { + @Nullable E poll(); + @Nullable E peek(); void addFirst(@Nonnull E item); @@ -31,8 +33,9 @@ abstract class BaseRangedDequeue implements RangedDeque { private final Deque head = new LinkedList<>(); - protected E next; + @Nullable protected E next; + @Nullable @Override public final E poll() { final E result = next; @@ -40,6 +43,7 @@ public final E poll() { return result; } + @Nullable @Override public final E peek() { return next; @@ -58,15 +62,18 @@ public final boolean isEmpty() { return next == null; } + @Nullable protected final E fetchNext() { return head.isEmpty() ? internalPoll() : head.poll(); } + @Nullable protected abstract E internalPoll(); } class EmptyRangedDequeue extends BaseRangedDequeue { + @Nullable @Override protected E internalPoll() { return null; @@ -82,6 +89,7 @@ class TokenizerQueue extends BaseRangedDequeue { next = fetchNext(); } + @Nullable @Override protected Ranged internalPoll() { return tokenizer.next() ? tokenizer.current() : null; @@ -99,6 +107,7 @@ class ArrayQueue extends BaseRangedDequeue { next = fetchNext(); } + @Nullable @Override protected E internalPoll() { return index >= array.length ? 
null : array[index++]; diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/GrpcRequestMessageHandlerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/GrpcRequestMessageHandlerTest.groovy new file mode 100644 index 00000000000..e4081f96d7d --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/GrpcRequestMessageHandlerTest.groovy @@ -0,0 +1,124 @@ +package com.datadog.iast + +import com.datadog.iast.protobuf.Test2 +import com.datadog.iast.protobuf.Test3 +import com.datadog.iast.util.ObjectVisitor +import datadog.trace.api.gateway.RequestContext +import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.InstrumentationBridge +import datadog.trace.api.iast.SourceTypes +import datadog.trace.api.iast.propagation.PropagationModule +import datadog.trace.test.util.DDSpecification +import foo.bar.VisitableClass + +import java.util.function.Predicate + +import static com.datadog.iast.util.ObjectVisitor.State.CONTINUE + +class GrpcRequestMessageHandlerTest extends DDSpecification { + + private PropagationModule propagation + private IastRequestContext iastCtx + private RequestContext ctx + + void setup() { + propagation = Mock(PropagationModule) + InstrumentationBridge.registerIastModule(propagation) + iastCtx = Mock(IastRequestContext) + ctx = Mock(RequestContext) { + getData(RequestContextSlot.IAST) >> iastCtx + } + } + + void 'the handler does nothing without propagation'() { + given: + final handler = new GrpcRequestMessageHandler() + InstrumentationBridge.clearIastModules() + + when: + handler.apply(ctx, [:]) + + then: + 0 * _ + } + + void 'the handler does nothing with null values'() { + given: + final handler = new GrpcRequestMessageHandler() + + when: + handler.apply(ctx, null) + + then: + 0 * _ + } + + void 'the handler forwards objects to the propagation module'() { + given: + final target = [:] + final handler = new GrpcRequestMessageHandler() + + when: + handler.apply(ctx, target) + + then: + 1 * propagation.taintDeeply(iastCtx, target, SourceTypes.GRPC_BODY, _ as Predicate>) + } + + void 'the handler only takes into account protobuf v.#protobufVersion related messages'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) { + visit(_ as String, _ as Object) >> { + println 'feo' + return CONTINUE + } + } + final url = 'https://dd.datad0g.com/' + final nonProtobufMessage = new VisitableClass(name: 'test') + final filter = GrpcRequestMessageHandler::isProtobufArtifact + + when: 'the message is not a protobuf instance' + ObjectVisitor.visit(nonProtobufMessage, visitor, filter) + + then: 'only the root object is visited' + 1 * visitor.visit('root', nonProtobufMessage) >> CONTINUE + 0 * visitor._ + + when: 'the message is a protobuf message' + ObjectVisitor.visit(protobufMessage, visitor, filter) + + then: 'all the properties are visited' + 1 * visitor.visit('root', protobufMessage) >> CONTINUE + 1 * visitor.visit('root.child_.optional_', 'optional') >> CONTINUE + 1 * visitor.visit('root.child_.required_', 'required') >> CONTINUE + 1 * visitor.visit('root.child_.repeated_[0]', 'repeated0') >> CONTINUE + 1 * visitor.visit('root.child_.repeated_[1]', 'repeated1') >> CONTINUE + // for maps we go inside com.google.protobuf.MapField and extract properties + 1 * visitor.visit('root.child_.map_.mapData[]', 'key') >> CONTINUE + 1 * visitor.visit('root.child_.map_.mapData[key]', 'value') >> CONTINUE + + where: + protobufVersion << ['2', '3'] + protobufMessage << [buildProto2Message(), buildProto3Message()] + } 
+ + private static def buildProto2Message() { + final child = Test2.Proto2Child.newBuilder() + .setOptional("optional") + .setRequired("required") + .addAllRepeated(Arrays.asList('repeated0', 'repeated1')) + .putMap('key', 'value') + .build() + return Test2.Proto2Parent.newBuilder().setChild(child).build() + } + + private static def buildProto3Message() { + final child = Test3.Proto3Child.newBuilder() + .setOptional("optional") + .setRequired("required") + .addAllRepeated(Arrays.asList('repeated0', 'repeated1')) + .putMap('key', 'value') + .build() + return Test3.Proto3Parent.newBuilder().setChild(child).build() + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastModuleImplTestBase.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastModuleImplTestBase.groovy index d47ce8e0a14..9ad167ed6c6 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastModuleImplTestBase.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastModuleImplTestBase.groovy @@ -1,6 +1,5 @@ package com.datadog.iast -import com.datadog.iast.HasDependencies.Dependencies import com.datadog.iast.overhead.Operation import com.datadog.iast.overhead.OverheadController import datadog.trace.api.Config @@ -25,6 +24,8 @@ class IastModuleImplTestBase extends DDSpecification { // TODO replace by mock an fix all mock assertions (0 * _ will usually fail) protected StackWalker stackWalker = StackWalkerFactory.INSTANCE + protected Dependencies dependencies = new Dependencies(Config.get(), reporter, overheadController, stackWalker) + void setup() { AgentTracer.forceRegister(tracer) overheadController.acquireRequest() >> true @@ -34,9 +35,4 @@ class IastModuleImplTestBase extends DDSpecification { void cleanup() { AgentTracer.forceRegister(ORIGINAL_TRACER) } - - protected E registerDependencies(final E module) { - module.registerDependencies(new Dependencies(Config.get(), reporter, overheadController, stackWalker)) - return module - } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastSystemTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastSystemTest.groovy index f6b610bc45b..e5374552b8c 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastSystemTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/IastSystemTest.groovy @@ -1,5 +1,6 @@ package com.datadog.iast +import datadog.trace.api.iast.InstrumentationBridge import datadog.trace.api.internal.TraceSegment import datadog.trace.api.gateway.InstrumentationGateway import datadog.trace.api.gateway.RequestContextSlot @@ -11,6 +12,10 @@ import datadog.trace.test.util.DDSpecification class IastSystemTest extends DDSpecification { + def setup() { + InstrumentationBridge.clearIastModules() + } + void 'start'() { given: final ig = new InstrumentationGateway() @@ -33,6 +38,8 @@ class IastSystemTest extends DDSpecification { then: 1 * ss.registerCallback(Events.get().requestStarted(), _) 1 * ss.registerCallback(Events.get().requestEnded(), _) + 1 * ss.registerCallback(Events.get().requestHeader(), _) + 1 * ss.registerCallback(Events.get().grpcServerRequestMessage(), _) 0 * _ when: @@ -52,6 +59,8 @@ class IastSystemTest extends DDSpecification { 1 * iastContext.getTaintedObjects() 1 * iastContext.getMetricCollector() 1 * traceSegment.setTagTop('_dd.iast.enabled', 1) + 1 * iastContext.getxContentTypeOptions() >> 'nosniff' + 1 * iastContext.getStrictTransportSecurity() >> 'max-age=35660' 0 * _ noExceptionThrown() } diff 
--git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/ReporterTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/ReporterTest.groovy index 0f5de3915e6..b44307568bd 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/ReporterTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/ReporterTest.groovy @@ -52,10 +52,10 @@ class ReporterTest extends DDSpecification { span.getSpanId() >> spanId final v = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(spanId, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("MD5") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("MD5") + ) when: reporter.report(span, v) @@ -97,15 +97,15 @@ class ReporterTest extends DDSpecification { span.getSpanId() >> spanId final v1 = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(spanId, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("MD5") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("MD5") + ) final v2 = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(spanId, new StackTraceElement("foo", "foo", "foo", 2)), - new Evidence("MD4") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "foo", "foo", 2)), + new Evidence("MD4") + ) when: reporter.report(span, v1) @@ -148,33 +148,77 @@ class ReporterTest extends DDSpecification { final tracerAPI = Mock(TracerAPI) AgentTracer.forceRegister(tracerAPI) final spanId = 12345L + final serviceName = 'service-name' final span = Mock(AgentSpan) final scope = Mock(AgentScope) final ctx = new IastRequestContext() final reqCtx = Stub(RequestContext) reqCtx.getData(RequestContextSlot.IAST) >> ctx final reporter = new Reporter() - final v = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(0, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("MD5") - ) + final hash = v.getHash() when: reporter.report(null, v) - v.getLocation().getSpanId() == spanId then: noExceptionThrown() 1 * tracerAPI.startSpan('iast', 'vulnerability', _ as AgentSpan.Context) >> span 1 * tracerAPI.activateSpan(span, ScopeSource.MANUAL) >> scope - 1 * span.getSpanId() >> spanId 1 * span.getRequestContext() >> reqCtx 1 * span.setSpanType(InternalSpanTypes.VULNERABILITY) >> span 1 * span.setTag(ANALYZED.key(), ANALYZED.value()) + 1 * span.getServiceName() >> serviceName + 1 * span.getSpanId() >> spanId 1 * span.finish() 1 * scope.close() 0 * _ + + when: + def newSpanId = null + def newServiceName = null + if(v.getType() instanceof VulnerabilityType.HeaderVulnerabilityType){ + newServiceName = v.getLocation().getServiceName() + }else{ + newSpanId = v.getLocation().getSpanId() + } + def newHash = v.getHash() + + then: + if(v.getType() instanceof VulnerabilityType.HeaderVulnerabilityType){ + assert newServiceName == serviceName + assert newHash != hash + }else{ + assert newSpanId == spanId + assert newHash == hash + } + + where: + v | _ + defaultVulnerability() | _ + cookieVulnerability() | _ + headerVulnerability() | _ + } + + void 'no spans are created if duplicates are reported'() { + given: + final tracerAPI = Mock(TracerAPI) + AgentTracer.forceRegister(tracerAPI) + final ctx = new IastRequestContext() + final reqCtx = Stub(RequestContext) + reqCtx.getData(RequestContextSlot.IAST) >> ctx + final 
reporter = new Reporter((vul) -> true) + final v = new Vulnerability( + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(null, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("MD5") + ) + + when: + reporter.report(null, v) + + then: + noExceptionThrown() + 0 * _ } void 'null RequestContext does not throw'() { @@ -184,10 +228,10 @@ class ReporterTest extends DDSpecification { span.getRequestContext() >> null span.getSpanId() >> 12345L final v = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(0, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("MD5") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(null, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("MD5") + ) when: reporter.report(span, v) @@ -208,10 +252,10 @@ class ReporterTest extends DDSpecification { span.getRequestContext() >> reqCtx span.getSpanId() >> spanId final v = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(spanId, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("MD5") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("MD5") + ) when: reporter.report(span, v) @@ -225,16 +269,20 @@ class ReporterTest extends DDSpecification { void 'Vulnerabilities with same type and location are equals'() { given: + final span1 = Mock(AgentSpan) + span1.getSpanId() >> 123456 final vulnerability1 = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("GOOD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span1, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("GOOD") + ) + final span2 = Mock(AgentSpan) + span1.getSpanId() >> 7890 final vulnerability2 = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(7890, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("BAD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span2, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("BAD") + ) expect: vulnerability1 == vulnerability2 @@ -242,16 +290,20 @@ class ReporterTest extends DDSpecification { void 'Vulnerabilities with same type and different location are not equals'() { given: + final span1 = Mock(AgentSpan) + span1.getSpanId() >> 123456 final vulnerability1 = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("GOOD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span1, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("GOOD") + ) + final span2 = Mock(AgentSpan) + span1.getSpanId() >> 7890 final vulnerability2 = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(7890, new StackTraceElement("foo", "foo", "foo", 2)), - new Evidence("BAD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span2, new StackTraceElement("foo", "foo", "foo", 2)), + new Evidence("BAD") + ) expect: vulnerability1 != vulnerability2 @@ -264,10 +316,10 @@ class ReporterTest extends DDSpecification { final batch = new VulnerabilityBatch() final span = spanWithBatch(batch) final vulnerability = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(span.spanId, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("GOOD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, 
new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("GOOD") + ) when: 'first time a vulnerability is reported' reporter.report(span, vulnerability) @@ -289,10 +341,10 @@ class ReporterTest extends DDSpecification { final batch = new VulnerabilityBatch() final span = spanWithBatch(batch) final vulnerability = new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(span.spanId, new StackTraceElement("foo", "foo", "foo", 1)), - new Evidence("GOOD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("GOOD") + ) when: 'first time a vulnerability is reported' reporter.report(span, vulnerability) @@ -316,10 +368,10 @@ class ReporterTest extends DDSpecification { final span = spanWithBatch(batch) final vulnerabilityBuilder = { int index -> new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(span.spanId, new StackTraceElement(index.toString(), index.toString(), index.toString(), index)), - new Evidence("GOOD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement(index.toString(), index.toString(), index.toString(), index)), + new Evidence("GOOD") + ) } when: 'the deduplication cache is filled for the first time' @@ -356,10 +408,10 @@ class ReporterTest extends DDSpecification { final span = spanWithBatch(batch) final vulnerabilityBuilder = { int index -> new Vulnerability( - VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(span.spanId, new StackTraceElement(index.toString(), index.toString(), index.toString(), index)), - new Evidence("GOOD") - ) + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement(index.toString(), index.toString(), index.toString(), index)), + new Evidence("GOOD") + ) } when: 'a few duplicates are reported in a concurrent scenario' @@ -401,4 +453,25 @@ class ReporterTest extends DDSpecification { span.getSpanId() >> spanId return span } + + private Vulnerability defaultVulnerability(){ + return new Vulnerability( + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(null, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("MD5") + ) + } + + private Vulnerability cookieVulnerability(){ + return new Vulnerability( + VulnerabilityType.INSECURE_COOKIE, + Location.forSpanAndStack(null, new StackTraceElement("foo", "foo", "foo", 1)), + new Evidence("cookie-name") + ) + } + + private Vulnerability headerVulnerability(){ + return new Vulnerability( + VulnerabilityType.XCONTENTTYPE_HEADER_MISSING, Location.forSpan(null), null) + } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestEndedHandlerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestEndedHandlerTest.groovy index f1d584b4eae..b325a373d90 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestEndedHandlerTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestEndedHandlerTest.groovy @@ -1,12 +1,12 @@ package com.datadog.iast -import com.datadog.iast.HasDependencies.Dependencies import com.datadog.iast.overhead.OverheadController import datadog.trace.api.Config import datadog.trace.api.gateway.Flow import datadog.trace.api.gateway.IGSpanInfo import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.InstrumentationBridge import datadog.trace.api.internal.TraceSegment import datadog.trace.test.util.DDSpecification import 
datadog.trace.util.stacktrace.StackWalker @@ -15,6 +15,10 @@ import groovy.transform.CompileDynamic @CompileDynamic class RequestEndedHandlerTest extends DDSpecification { + def setup() { + InstrumentationBridge.clearIastModules() + } + void 'request ends with IAST context'() { given: final OverheadController overheadController = Mock(OverheadController) diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestHeaderHandlerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestHeaderHandlerTest.groovy new file mode 100644 index 00000000000..1b8c733eadb --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestHeaderHandlerTest.groovy @@ -0,0 +1,42 @@ +package com.datadog.iast + +import datadog.trace.api.gateway.RequestContext +import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.test.util.DDSpecification +import groovy.transform.CompileDynamic + +@CompileDynamic +class RequestHeaderHandlerTest extends DDSpecification { + void 'forwarded proto is set'(){ + given: + final handler = new RequestHeaderHandler() + final iastCtx = Mock(IastRequestContext) + final ctx = Mock(RequestContext) + ctx.getData(RequestContextSlot.IAST) >> iastCtx + + when: + handler.accept(ctx, 'X-Forwarded-Proto', 'https') + + then: + 1 * ctx.getData(RequestContextSlot.IAST) >> iastCtx + 1 * iastCtx.setxForwardedProto('https') + 0 * _ + } + + + void 'forwarded proto is not set'(){ + given: + final handler = new RequestHeaderHandler() + final iastCtx = Mock(IastRequestContext) + final ctx = Mock(RequestContext) + ctx.getData(RequestContextSlot.IAST) >> iastCtx + + when: + handler.accept(ctx, 'Custom-Header', 'https') + + then: + 1 * ctx.getData(RequestContextSlot.IAST) >> iastCtx + 0 * iastCtx.getxForwardedProto() + 0 * _ + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestStartedHandlerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestStartedHandlerTest.groovy index f70e087b146..8a0ca4e2c4a 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestStartedHandlerTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/RequestStartedHandlerTest.groovy @@ -1,6 +1,5 @@ package com.datadog.iast -import com.datadog.iast.HasDependencies.Dependencies import com.datadog.iast.overhead.OverheadController import datadog.trace.api.Config import datadog.trace.api.gateway.Flow diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/LocationTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/LocationTest.groovy index 4ea0fba7d9e..656e4da309a 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/LocationTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/LocationTest.groovy @@ -1,16 +1,19 @@ package com.datadog.iast.model +import datadog.trace.bootstrap.instrumentation.api.AgentSpan import datadog.trace.test.util.DDSpecification class LocationTest extends DDSpecification { void 'forStack'() { given: + final span = Mock(AgentSpan) final spanId = 123456 + span.getSpanId() >> spanId final stack = new StackTraceElement("declaringClass", "methodName", "fileName", 42) when: - final location = Location.forSpanAndStack(spanId, stack) + final location = Location.forSpanAndStack(span, stack) then: location.getSpanId() == spanId @@ -21,12 +24,14 @@ class LocationTest extends DDSpecification { void 'forSpanAndClassAndMethod'() { given: + final span = 
Mock(AgentSpan) final spanId = 123456 + span.getSpanId() >> spanId final declaringClass = "declaringClass" final methodName = "methodName" when: - final location = Location.forSpanAndClassAndMethod(spanId, declaringClass, methodName) + final location = Location.forSpanAndClassAndMethod(span, declaringClass, methodName) then: location.getSpanId() == spanId diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/RangeTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/RangeTest.groovy index 5a3561fe7f8..c4992278756 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/RangeTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/RangeTest.groovy @@ -35,7 +35,7 @@ class RangeTest extends DDSpecification { def 'shift zero'() { given: final source = new Source(SourceTypes.NONE, null, null) - final orig = new Range(0, 1, source, Range.NOT_MARKED) + final orig = new Range(0, 1, source, VulnerabilityMarks.NOT_MARKED) when: final result = orig.shift(0) diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/VulnerabilityTypeTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/VulnerabilityTypeTest.groovy index 66d1319ea2d..408219c9000 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/VulnerabilityTypeTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/VulnerabilityTypeTest.groovy @@ -1,27 +1,65 @@ package com.datadog.iast.model +import datadog.trace.bootstrap.instrumentation.api.AgentSpan import datadog.trace.test.util.DDSpecification +import static com.datadog.iast.model.VulnerabilityType.INSECURE_COOKIE +import static com.datadog.iast.model.VulnerabilityType.NO_HTTPONLY_COOKIE +import static com.datadog.iast.model.VulnerabilityType.NO_SAMESITE_COOKIE import static com.datadog.iast.model.VulnerabilityType.WEAK_CIPHER +import static com.datadog.iast.model.VulnerabilityType.XCONTENTTYPE_HEADER_MISSING +import static com.datadog.iast.model.VulnerabilityType.HSTS_HEADER_MISSING class VulnerabilityTypeTest extends DDSpecification { - def 'test compute hash'(final VulnerabilityType type, final Location location, final Evidence evidence, final long expected) { + void 'test compute hash'() { when: final vulnerability = new Vulnerability(type, location, evidence) + then: vulnerability.hash == expected where: - type | location | evidence | expected - WEAK_CIPHER | Location.forSpanAndStack(123, new StackTraceElement("foo", "foo", "foo", 1)) | new Evidence("MD5") | 1045110372 - WEAK_CIPHER | Location.forSpanAndStack(456, new StackTraceElement("foo", "foo", "foo", 1)) | new Evidence("MD4") | 1045110372 - WEAK_CIPHER | Location.forSpanAndStack(789, new StackTraceElement("foo", "foo", "foo", 1)) | null | 1045110372 - WEAK_CIPHER | Location.forSpanAndClassAndMethod(123, "foo", "foo") | new Evidence("MD5") | 3265519776 - WEAK_CIPHER | Location.forSpanAndClassAndMethod(456, "foo", "foo") | new Evidence("MD4") | 3265519776 - WEAK_CIPHER | Location.forSpanAndClassAndMethod(789, "foo", "foo") | null | 3265519776 - WEAK_CIPHER | null | new Evidence("MD5") | 1272119222 - WEAK_CIPHER | null | new Evidence("MD4") | 1272119222 - WEAK_CIPHER | null | null | 1272119222 + type | location | evidence | expected + WEAK_CIPHER | getSpanAndStackLocation(123) | new Evidence("MD5") | 1045110372 + WEAK_CIPHER | getSpanAndStackLocation(456) | new Evidence("MD4") | 1045110372 + WEAK_CIPHER | getSpanAndStackLocation(789) | null | 1045110372 + 
WEAK_CIPHER | getSpanAndClassAndMethodLocation(123) | new Evidence("MD5") | 3265519776 + WEAK_CIPHER | getSpanAndClassAndMethodLocation(456) | new Evidence("MD4") | 3265519776 + WEAK_CIPHER | getSpanAndClassAndMethodLocation(789) | null | 3265519776 + INSECURE_COOKIE | getSpanAndStackLocation(123) | null | 3471934557 + INSECURE_COOKIE | getSpanAndStackLocation(123) | new Evidence("cookieName1") | 360083726 + INSECURE_COOKIE | getSpanAndStackLocation(123) | new Evidence("cookieName2") | 2357141684 + NO_HTTPONLY_COOKIE | getSpanAndStackLocation(123) | null | 2115643285 + NO_HTTPONLY_COOKIE | getSpanAndStackLocation(123) | new Evidence("cookieName1") | 585548920 + NO_HTTPONLY_COOKIE | getSpanAndStackLocation(123) | new Evidence("cookieName2") | 3153040834 + NO_SAMESITE_COOKIE | getSpanAndStackLocation(123) | null | 3683185539 + NO_SAMESITE_COOKIE | getSpanAndStackLocation(123) | new Evidence("cookieName1") | 881944211 + NO_SAMESITE_COOKIE | getSpanAndStackLocation(123) | new Evidence("cookieName2") | 2912433961 + XCONTENTTYPE_HEADER_MISSING | getSpanLocation(123, null) | null | 3429203725 + XCONTENTTYPE_HEADER_MISSING | getSpanLocation(123, 'serviceName1') | null | 2718833340 + XCONTENTTYPE_HEADER_MISSING | getSpanLocation(123, 'serviceName2') | null | 990333702 + HSTS_HEADER_MISSING | getSpanLocation(123, null) | null | 121310697 + HSTS_HEADER_MISSING | getSpanLocation(123, 'serviceName1') | null | 3533496951 + HSTS_HEADER_MISSING | getSpanLocation(123, 'serviceName2') | null | 1268102093 + } + + private Location getSpanAndStackLocation(final long spanId) { + final span = Mock(AgentSpan) + span.getSpanId() >> spanId + return Location.forSpanAndStack(span, new StackTraceElement("foo", "foo", "foo", 1)) + } + + private Location getSpanAndClassAndMethodLocation(final long spanId) { + final span = Mock(AgentSpan) + span.getSpanId() >> spanId + return Location.forSpanAndClassAndMethod(span, "foo", "foo") + } + + private Location getSpanLocation(final long spanId, final String serviceName) { + final span = Mock(AgentSpan) + span.getSpanId() >> spanId + span.getServiceName() >> serviceName + return Location.forSpan(span) } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceEncodingTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceEncodingTest.groovy index c946e53f70a..9b3d67e9339 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceEncodingTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceEncodingTest.groovy @@ -10,6 +10,8 @@ import datadog.trace.test.util.DDSpecification import org.skyscreamer.jsonassert.JSONAssert import spock.lang.Shared +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED + class EvidenceEncodingTest extends DDSpecification { private static final List SOURCES_SUITE = (0..2).collect { new Source((byte) it, "name$it", "value$it") } @@ -65,7 +67,7 @@ class EvidenceEncodingTest extends DDSpecification { } private static Range range(final int start, final int length, final Source source) { - return new Range(start, length, source, Range.NOT_MARKED) + return new Range(start, length, source, NOT_MARKED) } private static Source source(final int index) { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceRedactionTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceRedactionTest.groovy index ab9ce895d37..255d531f819 100644 --- 
a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceRedactionTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/EvidenceRedactionTest.groovy @@ -70,11 +70,8 @@ class EvidenceRedactionTest extends DDSpecification { new StringValuePart(null) | _ new StringValuePart('') | _ new RedactedValuePart(null) | _ - new RedactedValuePart('') | _ new TaintedValuePart(Mock(JsonAdapter), null, null, true) | _ - new TaintedValuePart(Mock(JsonAdapter), null, '', true) | _ new TaintedValuePart(Mock(JsonAdapter), null, null, false) | _ - new TaintedValuePart(Mock(JsonAdapter), null, '', false) | _ } void 'test #suite'() { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/TaintedObjectEncodingTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/TaintedObjectEncodingTest.groovy index 55956358225..dfbc0fb8275 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/TaintedObjectEncodingTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/TaintedObjectEncodingTest.groovy @@ -10,6 +10,8 @@ import org.skyscreamer.jsonassert.JSONAssert import java.lang.ref.ReferenceQueue +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED + class TaintedObjectEncodingTest extends DDSpecification { @Override @@ -87,7 +89,7 @@ class TaintedObjectEncodingTest extends DDSpecification { private TaintedObject taintedObject(final String value, final byte sourceType, final String sourceName, final String sourceValue) { return new TaintedObject( value, - [new Range(0, value.length(), new Source(sourceType, sourceName, sourceValue), Range.NOT_MARKED)] as Range[], + [new Range(0, value.length(), new Source(sourceType, sourceName, sourceValue), NOT_MARKED)] as Range[], Mock(ReferenceQueue)) } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/VulnerabilityEncodingTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/VulnerabilityEncodingTest.groovy index c1bdd1f3908..4ade1c08a8d 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/VulnerabilityEncodingTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/model/json/VulnerabilityEncodingTest.groovy @@ -9,10 +9,14 @@ import com.datadog.iast.model.VulnerabilityBatch import com.datadog.iast.model.VulnerabilityType import datadog.trace.api.config.IastConfig import datadog.trace.api.iast.SourceTypes +import datadog.trace.bootstrap.instrumentation.api.AgentSpan import datadog.trace.test.util.DDSpecification import org.skyscreamer.jsonassert.JSONAssert -import static com.datadog.iast.model.Range.NOT_MARKED +import java.util.regex.Matcher +import java.util.regex.Pattern + +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED class VulnerabilityEncodingTest extends DDSpecification { @@ -27,7 +31,15 @@ class VulnerabilityEncodingTest extends DDSpecification { value.add(null) when: - final result = VulnerabilityEncoding.toJson(value) + def result = VulnerabilityEncoding.toJson(value) + + then: + JSONAssert.assertEquals('''{ + "vulnerabilities": [] + }''', result, true) + + when: + result = VulnerabilityEncoding.getExceededTagSizeJson(new TruncatedVulnerabilities(value.getVulnerabilities())) then: JSONAssert.assertEquals('''{ @@ -37,10 +49,13 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'one vulnerability'() { given: + final span = Mock(AgentSpan) + final spanId = 
123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("MD5") )) @@ -72,7 +87,7 @@ class VulnerabilityEncodingTest extends DDSpecification { final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(0, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(null, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("MD5") )) @@ -100,10 +115,13 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'one vulnerability with one source'() { given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD", [new Range(0, 1, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key", "value"), NOT_MARKED)] as Range[]) )) @@ -142,10 +160,13 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'one vulnerability with two sources'() { given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD", [ new Range(0, 1, new Source(SourceTypes.REQUEST_PARAMETER_NAME, "key", "value"), NOT_MARKED), new Range(1, 1, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key2", "value2"), NOT_MARKED) @@ -193,10 +214,13 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'one vulnerability with null source'() { given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD", [new Range(0, 1, null, NOT_MARKED)] as Range[]) )) @@ -228,10 +252,13 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'one vulnerability with no source type'() { given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD", [new Range(0, 1, new Source(SourceTypes.NONE, "key", "value"), NOT_MARKED)] as Range[]) )) @@ -269,16 +296,19 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'two vulnerabilities with one shared source'() { given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() final source = new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key", "value") value.add(new 
Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD1", [new Range(0, 1, source, NOT_MARKED)] as Range[]) )) value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD2", [new Range(0, 1, source, NOT_MARKED)] as Range[]) )) @@ -333,15 +363,18 @@ class VulnerabilityEncodingTest extends DDSpecification { void 'two vulnerability with no shared sources'() { given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId final value = new VulnerabilityBatch() value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD", [new Range(0, 1, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key1", "value"), NOT_MARKED)] as Range[]) )) value.add(new Vulnerability( VulnerabilityType.WEAK_HASH, - Location.forSpanAndStack(123456L, new StackTraceElement("foo", "fooMethod", "foo", 1)), + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), new Evidence("BAD", [new Range(0, 1, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key2", "value"), NOT_MARKED)] as Range[]) )) @@ -398,4 +431,162 @@ class VulnerabilityEncodingTest extends DDSpecification { ] }''', result, true) } + + void 'one truncated vulnerability'() { + given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId + final value = new VulnerabilityBatch() + value.add(new Vulnerability( + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), + new Evidence(generateLargeString()) + )) + + when: + final result = VulnerabilityEncoding.getExceededTagSizeJson(new TruncatedVulnerabilities(value.getVulnerabilities())) + + then: + JSONAssert.assertEquals('''{ + "vulnerabilities": [ + { + "type": "WEAK_HASH", + "evidence": { + "value": "MAX SIZE EXCEEDED" + }, + "hash":1042880134, + "location": { + "spanId": 123456, + "line": 1, + "method": "fooMethod", + "path": "foo" + } + } + ] + }''', result, true) + } + + void 'two truncated vulnerabilities'() { + given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId + final value = new VulnerabilityBatch() + value.add(new Vulnerability( + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), + new Evidence(generateLargeString(), [new Range(0, 1, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key1", "value"), NOT_MARKED)] as Range[]) + )) + value.add(new Vulnerability( + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), + new Evidence(generateLargeString(), [new Range(0, 1, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key2", "value"), NOT_MARKED)] as Range[]) + )) + + when: + final result = VulnerabilityEncoding.getExceededTagSizeJson(new TruncatedVulnerabilities(value.getVulnerabilities())) + + then: + JSONAssert.assertEquals('''{ + "vulnerabilities": [ + { + "evidence": { + "value": "MAX SIZE EXCEEDED" + }, + "hash": 1042880134, + 
"location": { + "spanId": 123456, + "line": 1, + "method": "fooMethod", + "path": "foo" + }, + "type": "WEAK_HASH" + }, + { + "evidence": { + "value": "MAX SIZE EXCEEDED" + }, + "hash": 1042880134, + "location": { + "spanId": 123456, + "line": 1, + "method": "fooMethod", + "path": "foo" + }, + "type": "WEAK_HASH" + } + ] + }''', result, true) + } + + void 'when json is greater than 25kb VulnerabilityEncoding#getExceededTagSizeJson is called'(){ + given: + final span = Mock(AgentSpan) + final spanId = 123456 + span.getSpanId() >> spanId + final value = new VulnerabilityBatch() + for (int i = 0 ; i < 40; i++){ + value.add(generateBigVulnerability(span)) + } + + when: + final result = VulnerabilityEncoding.toJson(value) + + then: 'all sources have been removed and all vulnerabilities have generic evidence' + !result.contains("source") //sources have been removed + countGenericEvidenceOccurrences(result) == value.getVulnerabilities().size() //All the vulnerabilities have generic evidence + + } + + void 'exception during serialization is caught'() { + given: + final value = new VulnerabilityBatch() + final type = Mock(VulnerabilityType) { + name() >> { throw new RuntimeException("ERROR") } + } + final vuln = new Vulnerability(type, null, null) + value.add(vuln) + + when: + final result = VulnerabilityEncoding.toJson(value) + + then: + JSONAssert.assertEquals('''{ + "vulnerabilities": [ + ] + }''', result, true) + } + + private static String generateLargeString(){ + int targetSize = 25 * 1024 + StringBuilder sb = new StringBuilder() + Random random = new Random() + while (sb.length() < targetSize){ + sb.append(random.nextInt()) + } + return sb.toString() + } + + + private static Vulnerability generateBigVulnerability(AgentSpan span){ + String largeString = generateLargeString() + return new Vulnerability( + VulnerabilityType.WEAK_HASH, + Location.forSpanAndStack(span, new StackTraceElement("foo", "fooMethod", "foo", 1)), + new Evidence(largeString, [ + new Range(0, largeString.length(), new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "key2", largeString), NOT_MARKED) + ] as Range[]) + ) + } + + private static int countGenericEvidenceOccurrences(final String input){ + Pattern pattern = Pattern.compile("\"evidence\":\\{\"value\":\"MAX SIZE EXCEEDED\"}") + Matcher matcher = pattern.matcher(input) + int count = 0 + while (matcher.find()){ + count++ + } + return count + } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/overhead/OverheadControllerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/overhead/OverheadControllerTest.groovy index aa93451d9c7..e784643f909 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/overhead/OverheadControllerTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/overhead/OverheadControllerTest.groovy @@ -47,9 +47,9 @@ class OverheadControllerTest extends DDSpecification { where: samplingPct | requests | expectedSampledRequests DEFAULT_REQUEST_SAMPLING | 100 | 33 - 30 | 100 | 33 - 30 | 10 | 3 - 30 | 9 | 3 + 33 | 100 | 33 + 33 | 10 | 3 + 33 | 9 | 3 50 | 100 | 50 50 | 10 | 5 100 | 1 | 1 @@ -57,6 +57,8 @@ class OverheadControllerTest extends DDSpecification { 200 | 100 | 100 1000 | 100 | 100 0 | 100 | 100 + 51 | 100 | 51 + 99 | 100 | 99 } void 'No more than two request can be acquired concurrently'() { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/PropagationModuleTest.groovy 
b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/PropagationModuleTest.groovy index a06b44e4fa5..b0dceae6377 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/PropagationModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/PropagationModuleTest.groovy @@ -4,6 +4,9 @@ import com.datadog.iast.IastModuleImplTestBase import com.datadog.iast.IastRequestContext import com.datadog.iast.model.Range import com.datadog.iast.model.Source +import com.datadog.iast.taint.Ranges +import com.datadog.iast.taint.TaintedObject +import datadog.trace.api.Config import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.iast.SourceTypes @@ -11,25 +14,19 @@ import datadog.trace.api.iast.Taintable import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.api.iast.propagation.PropagationModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import groovy.transform.CompileDynamic +import org.junit.Assume -import static com.datadog.iast.model.Range.NOT_MARKED -import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat -import static com.datadog.iast.taint.TaintUtils.fromTaintFormat -import static datadog.trace.api.iast.VulnerabilityMarks.SQL_INJECTION_MARK -import static datadog.trace.api.iast.VulnerabilityMarks.XPATH_INJECTION_MARK -import static datadog.trace.api.iast.VulnerabilityMarks.XSS_MARK +import static com.datadog.iast.taint.Ranges.highestPriorityRange +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED -@CompileDynamic class PropagationModuleTest extends IastModuleImplTestBase { private PropagationModule module - private List objectHolder + private IastRequestContext ctx - def setup() { + void setup() { module = new PropagationModuleImpl() - objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { getData(RequestContextSlot.IAST) >> ctx @@ -40,54 +37,49 @@ class PropagationModuleTest extends IastModuleImplTestBase { tracer.activeSpan() >> span } - void '#method null or empty'() { - when: + void '#method(#args) not taintable'() { + when: 'there is no context by default' module.&"$method".call(args.toArray()) - then: + then: 'no mock calls should happen' + 0 * _ + + when: 'there is a context' + args.add(0, ctx) + module.&"$method".call(args.toArray()) + + then: 'no mock calls should happen' 0 * _ where: - method | args - 'taintIfInputIsTainted' | [null, null] - 'taintIfInputIsTainted' | [null, new Object()] - 'taintIfInputIsTainted' | [null, 'test'] - 'taintIfInputIsTainted' | [new Object(), null] - 'taintIfInputIsTainted' | [null as String, null] - 'taintIfInputIsTainted' | ['', null] - 'taintIfInputIsTainted' | ['', new Object()] - 'taintIfInputIsTainted' | [null as String, new Object()] - 'taintIfInputIsTainted' | ['test', null] - 'taintIfInputIsTainted' | [null as String, 'test'] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', null as String, null] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', '', null] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', '', new Object()] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', null as String, new Object()] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'test', null] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', null as String, 'test'] - 'taintIfInputIsTainted' | 
[SourceTypes.REQUEST_PARAMETER_VALUE, [].toSet(), 'test'] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, ['test'].toSet(), null] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, [:].entrySet().toList(), 'test'] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, [key: "value"].entrySet().toList(), null] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', null as Collection, 'test'] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', [], 'test'] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', ['value'], null] - 'taintObjectIfInputIsTaintedKeepingRanges' | [null, new Object()] - 'taintObjectIfInputIsTaintedKeepingRanges' | [new Object(), null] - 'taintIfAnyInputIsTainted' | [null, null] - 'taintIfAnyInputIsTainted' | [null, [].toArray()] - 'taintIfAnyInputIsTainted' | ['test', [].toArray()] - 'taint' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', null as String] - 'taint' | [SourceTypes.REQUEST_PARAMETER_VALUE, null as String, null as String] - 'taint' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', ''] - 'taintObjects' | [SourceTypes.REQUEST_PARAMETER_VALUE, null as Object[]] - 'taintObjects' | [SourceTypes.REQUEST_PARAMETER_VALUE, [] as Object[]] - 'taintObjects' | [SourceTypes.REQUEST_PARAMETER_VALUE, null as Collection] - 'taintIfInputIsTaintedWithMarks' | ['', null, VulnerabilityMarks.XSS_MARK] - 'taintIfInputIsTaintedWithMarks' | ['', new Object(), VulnerabilityMarks.XSS_MARK] - 'taintIfInputIsTaintedWithMarks' | [null as String, new Object(), VulnerabilityMarks.XSS_MARK] - 'taintIfInputIsTaintedWithMarks' | ['test', null, VulnerabilityMarks.XSS_MARK] - 'taintIfInputIsTaintedWithMarks' | [null as String, 'test', VulnerabilityMarks.XSS_MARK] + method | args + 'taint' | [null, SourceTypes.REQUEST_PARAMETER_VALUE] + 'taint' | [null, SourceTypes.REQUEST_PARAMETER_VALUE, 'name'] + 'taint' | [null, SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value'] + 'taintIfTainted' | [null, 'test'] + 'taintIfTainted' | ['test', null] + 'taintIfTainted' | [null, 'test', false, NOT_MARKED] + 'taintIfTainted' | ['test', null, false, NOT_MARKED] + 'taintIfTainted' | [null, 'test'] + 'taintIfTainted' | ['test', null] + 'taintIfTainted' | [null, 'test', SourceTypes.REQUEST_PARAMETER_VALUE] + 'taintIfTainted' | ['test', null, SourceTypes.REQUEST_PARAMETER_VALUE] + 'taintIfTainted' | [null, 'test', SourceTypes.REQUEST_PARAMETER_VALUE, 'name'] + 'taintIfTainted' | ['test', null, SourceTypes.REQUEST_PARAMETER_VALUE, 'name'] + 'taintIfTainted' | [null, 'test', SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value'] + 'taintIfTainted' | ['test', null, SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value'] + 'taintIfAnyTainted' | [null, ['test'] as Object[]] + 'taintIfAnyTainted' | ['test', null] + 'taintIfAnyTainted' | ['test', [] as Object[]] + 'taintDeeply' | [ + null, + SourceTypes.REQUEST_PARAMETER_VALUE, + { + true + } + ] + 'findSource' | [null] + 'isTainted' | [null] } void '#method without span'() { @@ -99,438 +91,473 @@ class PropagationModuleTest extends IastModuleImplTestBase { 0 * _ where: - method | args - 'taintIfInputIsTainted' | [new Object(), new Object()] - 'taintIfInputIsTainted' | [new Object(), 'test'] - 'taintIfInputIsTainted' | ['test', new Object()] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value', new Object()] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', ['value'], new Object()] - 'taintIfInputIsTainted' | 
[SourceTypes.REQUEST_PARAMETER_VALUE, ['value'].toSet(), new Object()] - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, [key: 'value'].entrySet().toList(), new Object()] - 'taintObjectIfInputIsTaintedKeepingRanges' | [new Object(), new Object()] - 'taintIfAnyInputIsTainted' | ['value', ['test', 'test2'].toArray()] - 'taint' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value'] - 'taintObjects' | [SourceTypes.REQUEST_PARAMETER_VALUE, [new Object()] as Object[]] - 'taintObjects' | [SourceTypes.REQUEST_PARAMETER_VALUE, [new Object()]] - 'taintObjects' | [SourceTypes.REQUEST_PARAMETER_VALUE, [new Object()] as Collection] - 'taintIfInputIsTaintedWithMarks' | ['test', new Object(), VulnerabilityMarks.XSS_MARK] + method | args + 'taint' | ['test', SourceTypes.REQUEST_PARAMETER_VALUE] + 'taint' | ['test', SourceTypes.REQUEST_PARAMETER_VALUE, 'name'] + 'taint' | ['test', SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value'] + 'taintIfTainted' | ['test', 'test'] + 'taintIfTainted' | ['test', 'test', false, NOT_MARKED] + 'taintIfTainted' | ['test', 'test', SourceTypes.REQUEST_PARAMETER_VALUE] + 'taintIfTainted' | ['test', 'test', SourceTypes.REQUEST_PARAMETER_VALUE, 'name'] + 'taintIfTainted' | ['test', 'test', SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value'] + 'taintIfAnyTainted' | ['test', ['test']] + 'taintDeeply' | [ + 'test', + SourceTypes.REQUEST_PARAMETER_VALUE, + { + true + } + ] + 'findSource' | ['test'] + 'isTainted' | ['test'] } - void 'test propagation for #method'() { + void 'test taint'() { given: - final toTaint = toTaintClosure.call(args) - final targetMethod = module.&"$method" - final arguments = args.toArray() - final input = inputClosure.call(arguments) + final value = (target instanceof CharSequence) ? target.toString() : null + final source = taintedSource(value) + final ranges = Ranges.forObject(source) when: - targetMethod.call(arguments) + module.taint(target, source.origin, source.name, source.value) then: - assertNotTainted(toTaint) + final tainted = getTaintedObject(target) + if (shouldTaint) { + assertTainted(tainted, ranges) + } else { + assert tainted == null + } - when: - taint(input) - targetMethod.call(arguments) + where: + target | shouldTaint + string('string') | true + stringBuilder('stringBuilder') | true + date() | true + taintable() | true + } + + void 'test taintIfTainted keeping ranges'() { + given: + def (target, input) = suite + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + + when: 'input is not tainted' + module.taintIfTainted(target, input, true, NOT_MARKED) then: - assertTainted(toTaint) + assert getTaintedObject(target) == null - where: - method | args | toTaintClosure | inputClosure - 'taintIfInputIsTainted' | [new Object(), 'I am an string'] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTainted' | [new Object(), new Object()] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTainted' | [new Object(), new MockTaintable()] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTainted' | ['Hello', 'I am an string'] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTainted' | ['Hello', new Object()] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTainted' | ['Hello', new MockTaintable()] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value', 'I am an string'] | { - it[2] - } | { - it[3] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value', new Object()] 
| { - it[2] - } | { - it[3] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value', new MockTaintable()] | { - it[2] - } | { - it[3] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', ['value'], 'I am an string'] | { - it[2][0] - } | { - it[3] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', ['value'], new Object()] | { - it[2][0] - } | { - it[3] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, 'name', ['value'], new MockTaintable()] | { - it[2][0] - } | { - it[3] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, ['value'].toSet(), 'I am an string'] | { - it[1][0] - } | { - it[2] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, ['value'].toSet(), new Object()] | { - it[1][0] - } | { - it[2] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, ['value'].toSet(), new MockTaintable()] | { - it[1][0] - } | { - it[2] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, [name: 'value'].entrySet().toList(), 'I am an string'] | { - it[1][0].value - } | { - it[2] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, [name: 'value'].entrySet().toList(), new Object()] | { - it[1][0].value - } | { - it[2] - } - 'taintIfInputIsTainted' | [SourceTypes.REQUEST_PARAMETER_VALUE, [name: 'value'].entrySet().toList(), new MockTaintable()] | { - it[1][0].value - } | { - it[2] - } - 'taintObjectIfInputIsTaintedKeepingRanges' | [new Object(), new Object()] | { - it[0] - } | { - it[1] - } - 'taintObjectIfInputIsTaintedKeepingRanges' | [new Object(), new MockTaintable()] | { - it[0] - } | { - it[1] - } - 'taintIfAnyInputIsTainted' | [new Object(), ['I am an string'].toArray()] | { - it[0] - } | { - it[1][0] - } - 'taintIfAnyInputIsTainted' | [new Object(), [new Object()].toArray()] | { - it[0] - } | { - it[1][0] - } - 'taintIfAnyInputIsTainted' | [new Object(), [new MockTaintable()].toArray()] | { - it[0] - } | { - it[1][0] - } - 'taintIfAnyInputIsTainted' | ['Hello', ['I am an string'].toArray()] | { - it[0] - } | { - it[1][0] - } - 'taintIfAnyInputIsTainted' | ['Hello', [new Object()].toArray()] | { - it[0] - } | { - it[1][0] - } - 'taintIfAnyInputIsTainted' | ['Hello', [new MockTaintable()].toArray()] | { - it[0] - } | { - it[1][0] - } - 'taintIfInputIsTaintedWithMarks' | ['Hello', 'I am an string', VulnerabilityMarks.XSS_MARK] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTaintedWithMarks' | ['Hello', new Object(), VulnerabilityMarks.XSS_MARK] | { - it[0] - } | { - it[1] - } - 'taintIfInputIsTaintedWithMarks' | ['Hello', new MockTaintable(), VulnerabilityMarks.XSS_MARK] | { - it[0] - } | { - it[1] + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfTainted(target, input, true, NOT_MARKED) + + then: + final tainted = getTaintedObject(target) + if (target instanceof Taintable) { + // only first range is kept + assertTainted(tainted, [taintedFrom.ranges[0]] as Range[]) + } else { + assertTainted(tainted, taintedFrom.ranges) } + + where: + suite << taintIfSuite() } - void 'test value source for #method'() { + void 'test taintIfTainted keeping ranges with a mark'() { given: - final span = Mock(AgentSpan) - tracer.activeSpan() >> span - final reqCtx = Mock(RequestContext) - span.getRequestContext() >> reqCtx - final ctx = new IastRequestContext() - reqCtx.getData(RequestContextSlot.IAST) >> ctx + def (target, input) = suite + Assume.assumeFalse(target instanceof Taintable) // 
taintable does not support multiple ranges or marks + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + final mark = VulnerabilityMarks.UNVALIDATED_REDIRECT_MARK - when: - module."$method"(source, name, value) + when: 'input is not tainted' + module.taintIfTainted(target, input, true, mark) then: - 1 * tracer.activeSpan() >> span - 1 * span.getRequestContext() >> reqCtx - 1 * reqCtx.getData(RequestContextSlot.IAST) >> ctx - 0 * _ - ctx.getTaintedObjects().get(name) == null - def to = ctx.getTaintedObjects().get(value) - to != null - to.get() == value - to.ranges.size() == 1 - to.ranges[0].start == 0 - to.ranges[0].length == value.length() - to.ranges[0].source == new Source(source, name, value) + assert getTaintedObject(target) == null + + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfTainted(target, input, true, mark) + + then: + final tainted = getTaintedObject(target) + assertTainted(tainted, taintedFrom.ranges, mark) where: - method | name | value | source - 'taint' | null | 'value' | SourceTypes.REQUEST_PARAMETER_VALUE - 'taint' | 'name' | 'value' | SourceTypes.REQUEST_PARAMETER_VALUE + suite << taintIfSuite() } - void 'taint with context for #method'() { - setup: - def ctx = new IastRequestContext() + void 'test taintIfTainted not keeping ranges'() { + given: + def (target, input) = suite + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] - when: - module."$method"(ctx as Object, source, name, value) + when: 'input is not tainted' + module.taintIfTainted(target, input, false, NOT_MARKED) then: - ctx.getTaintedObjects().get(name) == null - def to = ctx.getTaintedObjects().get(value) - to != null - to.get() == value - to.ranges.size() == 1 - to.ranges[0].start == 0 - to.ranges[0].length == value.length() - to.ranges[0].source == new Source(source, name, value) - 0 * _ + assert getTaintedObject(target) == null + + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfTainted(target, input, false, NOT_MARKED) + + then: + final tainted = getTaintedObject(target) + assertTainted(tainted, [highestPriorityRange(taintedFrom.ranges)] as Range[]) where: - method | name | value | source - 'taint' | null | "value" | SourceTypes.REQUEST_PATH_PARAMETER - 'taint' | "" | "value" | SourceTypes.REQUEST_PATH_PARAMETER - 'taint' | "param" | "value" | SourceTypes.REQUEST_PATH_PARAMETER + suite << taintIfSuite() } - void 'test taintObject'() { - when: - module.taintObject(origin, toTaint) + void 'test taintIfTainted not keeping ranges with a mark'() { + given: + def (target, input) = suite + Assume.assumeFalse(target instanceof Taintable) // taintable does not support marks + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + final mark = VulnerabilityMarks.LDAP_INJECTION_MARK + + when: 'input is not tainted' + module.taintIfTainted(target, input, false, mark) then: - assertTainted(toTaint) + assert getTaintedObject(target) == null + + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfTainted(target, input, false, mark) + + then: + final tainted = getTaintedObject(target) + assertTainted(tainted, [highestPriorityRange(taintedFrom.ranges)] as Range[], mark) where: - origin | toTaint - SourceTypes.REQUEST_PARAMETER_VALUE | new Object() - 
SourceTypes.REQUEST_PARAMETER_VALUE | new MockTaintable() + suite << taintIfSuite() } - void 'test taintObjects[array]'() { - when: - module.taintObjects(origin, new Object[]{ - toTaint - }) + void 'test taintIfAnyTainted keeping ranges'() { + given: + def (target, input) = suite + final inputs = ['test', input].toArray() + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + + when: 'input is not tainted' + module.taintIfAnyTainted(target, inputs, true, NOT_MARKED) then: - assertTainted(toTaint) + assert getTaintedObject(target) == null + + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfAnyTainted(target, inputs, true, NOT_MARKED) + + then: + final tainted = getTaintedObject(target) + if (target instanceof Taintable) { + // only first range is kept + assertTainted(tainted, [taintedFrom.ranges[0]] as Range[]) + } else { + assertTainted(tainted, taintedFrom.ranges) + } where: - origin | toTaint - SourceTypes.REQUEST_PARAMETER_VALUE | new Object() - SourceTypes.REQUEST_PARAMETER_VALUE | new MockTaintable() + suite << taintIfSuite() } - void 'onJsonFactoryCreateParser'() { + void 'test taintIfAnyTainted keeping ranges with a mark'() { given: - final taintedObjects = ctx.getTaintedObjects() - def shouldBeTainted = true + def (target, input) = suite + Assume.assumeFalse(target instanceof Taintable) // taintable does not support multiple ranges or marks + final inputs = ['test', input].toArray() + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + final mark = VulnerabilityMarks.UNVALIDATED_REDIRECT_MARK - def firstParam - if (param1 instanceof String) { - firstParam = addFromTaintFormat(taintedObjects, param1) - objectHolder.add(firstParam) - } else { - firstParam = param1 - } + when: 'input is not tainted' + module.taintIfAnyTainted(target, inputs, true, mark) - def secondParam - if (param2 instanceof String) { - secondParam = addFromTaintFormat(taintedObjects, param2) - objectHolder.add(secondParam) - shouldBeTainted = fromTaintFormat(param2) != null - } else { - secondParam = param2 - } + then: + assert getTaintedObject(target) == null - if (shouldBeTainted) { - def ranges = new Range[1] - ranges[0] = new Range(0, Integer.MAX_VALUE, new Source((byte) 1, "test", "test"), NOT_MARKED) - taintedObjects.taint(secondParam, ranges) - } + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfAnyTainted(target, inputs, true, mark) + + then: + final tainted = getTaintedObject(target) + assertTainted(tainted, taintedFrom.ranges, mark) + + where: + suite << taintIfSuite() + } + + void 'test taintIfAnyTainted not keeping ranges'() { + given: + def (target, input) = suite + final inputs = ['test', input].toArray() + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + + when: 'input is not tainted' + module.taintIfAnyTainted(target, inputs, false, NOT_MARKED) + + then: + assert getTaintedObject(target) == null + + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfAnyTainted(target, inputs, false, NOT_MARKED) + + then: + final tainted = getTaintedObject(target) + assertTainted(tainted, [highestPriorityRange(taintedFrom.ranges)] as Range[]) + + where: + suite << taintIfSuite() + } + + void 'test taintIfAnyTainted not keeping ranges with a 
mark'() { + given: + def (target, input) = suite + Assume.assumeFalse(target instanceof Taintable) // taintable does not support marks + final inputs = ['test', input].toArray() + final source = taintedSource() + final ranges = [new Range(0, 1, source, NOT_MARKED), new Range(1, 1, source, NOT_MARKED)] as Range[] + final mark = VulnerabilityMarks.LDAP_INJECTION_MARK + + when: 'input is not tainted' + module.taintIfAnyTainted(target, inputs, false, mark) + + then: + assert getTaintedObject(target) == null + + when: 'input is tainted' + final taintedFrom = taintObject(input, ranges) + module.taintIfAnyTainted(target, inputs, false, mark) + + then: + final tainted = getTaintedObject(target) + assertTainted(tainted, [highestPriorityRange(taintedFrom.ranges)] as Range[], mark) + + where: + suite << taintIfSuite() + } + + void 'test taint deeply'() { + given: + final target = [Hello: " World!", Age: 25] when: - module.taintIfInputIsTainted(firstParam, secondParam) + module.taintDeeply(target, SourceTypes.GRPC_BODY, { true }) then: - def to = ctx.getTaintedObjects().get(param1) - if (shouldBeTainted) { - assert to != null - assert to.get() == param1 - if (param1 instanceof String) { - final ranges = to.getRanges() - assert ranges.length == 1 - assert ranges[0].start == 0 - assert ranges[0].length == param1.length() - } else { - final ranges = to.getRanges() - assert ranges.length == 1 - assert ranges[0].start == 0 - assert ranges[0].length == Integer.MAX_VALUE - } - } else { - assert to == null + final taintedObjects = ctx.taintedObjects + target.keySet().each { key -> + assert taintedObjects.get(key) != null } + assert taintedObjects.get(target['Hello']) != null + assert taintedObjects.size() == 3 // two keys and one string value + } - where: - param1 | param2 - '123' | new Object() - new Object() | new Object() - new Object() | '123' - new Object() | '==>123<==' + void 'test taint deeply char sequence'() { + given: + final target = stringBuilder('taint me') + + when: + module.taintDeeply(target, SourceTypes.GRPC_BODY, { true }) + + then: + final taintedObjects = ctx.taintedObjects + assert taintedObjects.size() == 1 + final tainted = taintedObjects.get(target) + assert tainted != null + final source = tainted.ranges[0].source + assert source.origin == SourceTypes.GRPC_BODY + assert source.value == target.toString() } - void 'test first tainted source'() { + void 'test is tainted and find source'() { + given: + if (source != null) { + taintObject(target, source) + } + when: - final before = module.firstTaintedSource(target) + final tainted = module.isTainted(target) then: - before == null + tainted == (source != null) when: - module.taintObject(origin, target) - final after = module.firstTaintedSource(target) + final foundSource = module.findSource(target) then: - after.origin == origin + foundSource == source where: - target | origin - 'this is a string' | SourceTypes.REQUEST_PARAMETER_VALUE - new Object() | SourceTypes.REQUEST_PARAMETER_VALUE - new MockTaintable() | SourceTypes.REQUEST_PARAMETER_VALUE + target | source + string('string') | null + stringBuilder('stringBuilder') | null + date() | null + taintable() | null + string('string') | taintedSource() + stringBuilder('stringBuilder') | taintedSource() + date() | taintedSource() + taintable() | taintedSource() } - void 'test taintIfInputIsTaintedWithMarks marks ranges for #mark'() { + void 'test source names over threshold'() { given: - final toTaint = 'this is a string' - final tainted = new Object() - 
ctx.getTaintedObjects().taint(tainted, previousRanges) - objectHolder.add(toTaint) + final maxSize = Config.get().iastTruncationMaxValueLength when: - module.taintIfInputIsTaintedWithMarks(toTaint, tainted, mark) + module.taint(target, SourceTypes.REQUEST_PARAMETER_VALUE) then: - final to = ctx.getTaintedObjects().get(toTaint) - final ranges = to.getRanges() - ranges != null && ranges.length == 1 - ranges[0].marks == expected + final tainted = ctx.getTaintedObjects().get(target) + tainted != null + final sourceValue = tainted.ranges.first().source.value + sourceValue.length() <= target.length() + sourceValue.length() <= maxSize where: - previousRanges | mark | expected - [new Range(0, Integer.MAX_VALUE, getDefaultSource(), NOT_MARKED)] as Range[] | XSS_MARK | XSS_MARK - [new Range(0, 1, getDefaultSource(), SQL_INJECTION_MARK), new Range(2, 3, getDefaultSource(), NOT_MARKED)] as Range[] | XSS_MARK | XSS_MARK - [new Range(2, 3, getDefaultSource(), NOT_MARKED), new Range(0, 1, getDefaultSource(), SQL_INJECTION_MARK)] as Range[] | XSS_MARK | XSS_MARK - [new Range(2, 3, getDefaultSource(), XPATH_INJECTION_MARK), new Range(0, 1, getDefaultSource(), SQL_INJECTION_MARK)] as Range[] | XSS_MARK | getMarks(XPATH_INJECTION_MARK, XSS_MARK) + target | _ + string((0..Config.get().getIastTruncationMaxValueLength() * 2).join('')) | _ + stringBuilder((0..Config.get().getIastTruncationMaxValueLength() * 2).join('')) | _ + } + + private List> taintIfSuite() { + return [ + Tuple.tuple(string('string'), string('string')), + Tuple.tuple(string('string'), stringBuilder('stringBuilder')), + Tuple.tuple(string('string'), date()), + Tuple.tuple(string('string'), taintable()), + Tuple.tuple(stringBuilder('stringBuilder'), string('string')), + Tuple.tuple(stringBuilder('stringBuilder'), stringBuilder('stringBuilder')), + Tuple.tuple(stringBuilder('stringBuilder'), date()), + Tuple.tuple(stringBuilder('stringBuilder'), taintable()), + Tuple.tuple(date(), string('string')), + Tuple.tuple(date(), stringBuilder('stringBuilder')), + Tuple.tuple(date(), date()), + Tuple.tuple(date(), taintable()), + Tuple.tuple(taintable(), string('string')), + Tuple.tuple(taintable(), stringBuilder('stringBuilder')), + Tuple.tuple(taintable(), date()), + Tuple.tuple(taintable(), taintable()) + ] } - private E taint(final E toTaint) { - final source = new Source(SourceTypes.REQUEST_PARAMETER_VALUE, null, null) - if (toTaint instanceof Taintable) { - toTaint.$$DD$setSource(source) + private TaintedObject getTaintedObject(final Object target) { + if (target instanceof Taintable) { + final source = (target as Taintable).$$DD$getSource() as Source + return source == null ? 
null : new TaintedObject(target, Ranges.forObject(source), null) + } + return ctx.getTaintedObjects().get(target) + } + + private TaintedObject taintObject(final Object target, Source source, int mark = NOT_MARKED) { + if (target instanceof Taintable) { + target.$$DD$setSource(source) + } else if (target instanceof CharSequence) { + ctx.getTaintedObjects().taint(target, Ranges.forCharSequence(target, source, mark)) } else { - ctx.taintedObjects.taintInputObject(toTaint, source) - objectHolder.add(toTaint) + ctx.getTaintedObjects().taint(target, Ranges.forObject(source, mark)) } - return toTaint + return getTaintedObject(target) } - private void assertTainted(final Object toTaint) { - final tainted = ctx.getTaintedObjects().get(toTaint) - if (toTaint instanceof Taintable) { - assert tainted == null - assert toTaint.$$DD$getSource() != null + private TaintedObject taintObject(final Object target, Range[] ranges) { + if (target instanceof Taintable) { + target.$$DD$setSource(ranges[0].getSource()) } else { - assert tainted != null + ctx.getTaintedObjects().taint(target, ranges) + } + return getTaintedObject(target) + } + + private String string(String value, Source source = null, int mark = NOT_MARKED) { + final result = new String(value) + if (source != null) { + taintObject(result, source, mark) } + return result } - private void assertNotTainted(final Object toTaint) { - final tainted = ctx.getTaintedObjects().get(toTaint) - assert tainted == null - if (toTaint instanceof Taintable) { - assert toTaint.$$DD$getSource() == null + private StringBuilder stringBuilder(String value, Source source = null, int mark = NOT_MARKED) { + final result = new StringBuilder(value) + if (source != null) { + taintObject(result, source, mark) } + return result } - private static Source getDefaultSource() { - return new Source(SourceTypes.REQUEST_PARAMETER_VALUE, null, null) + private Date date(Source source = null, int mark = NOT_MARKED) { + final result = new Date() + if (source != null) { + taintObject(result, source, mark) + } + return result } - private static int getMarks(int ... 
marks) { - int result = NOT_MARKED - for (int mark : marks) { - result = result | mark + private Taintable taintable(Source source = null) { + final result = new MockTaintable() + if (source != null) { + taintObject(result, source) } return result } - /** - * Mocking makes the test a bit more confusing*/ - private static final class MockTaintable implements Taintable { + private Source taintedSource(String value = 'value') { + return new Source(SourceTypes.REQUEST_PARAMETER_VALUE, 'name', value) + } + private static void assertTainted(final TaintedObject tainted, final Range[] ranges, final int mark = NOT_MARKED) { + assert tainted != null + final originalValue = tainted.get() + assert tainted.ranges.length == ranges.length + ranges.eachWithIndex { Range expected, int i -> + final range = tainted.ranges[i] + if (mark == NOT_MARKED) { + assert range.marks == expected.marks + } else { + assert (range.marks & mark) > 0 + } + final source = range.source + assert !source.value.is(originalValue): 'Weak value should not be retained by the source' + + final expectedSource = expected.source + assert source.origin == expectedSource.origin + assert source.name == expectedSource.name + assert source.value == expectedSource.value + } + } + + private static class MockTaintable implements Taintable { private Source source - @Override @SuppressWarnings('CodeNarc') + @Override Source $$DD$getSource() { return source } - @Override @SuppressWarnings('CodeNarc') + @Override void $$DD$setSource(Source source) { this.source = source } + + @Override + String toString() { + return Taintable.name + } } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleRangeLimitForkedTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleRangeLimitForkedTest.groovy new file mode 100644 index 00000000000..45d8ff8561c --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleRangeLimitForkedTest.groovy @@ -0,0 +1,187 @@ +package com.datadog.iast.propagation + +import com.datadog.iast.IastModuleImplTestBase +import com.datadog.iast.IastRequestContext +import datadog.trace.api.config.IastConfig +import datadog.trace.api.gateway.RequestContext +import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.propagation.StringModule +import datadog.trace.bootstrap.instrumentation.api.AgentSpan + +import static com.datadog.iast.taint.TaintUtils.* + +class StringModuleRangeLimitForkedTest extends IastModuleImplTestBase { + + private StringModule module + + private List objectHolder + + private AgentSpan span + + private IastRequestContext ctx + + private RequestContext reqCtx + + def setup() { + injectSysConfig(IastConfig.IAST_MAX_RANGE_COUNT, '2') + + module = new StringModuleImpl() + objectHolder = [] + span = Mock(AgentSpan) + tracer.activeSpan() >> span + reqCtx = Mock(RequestContext) + span.getRequestContext() >> reqCtx + ctx = new IastRequestContext() + reqCtx.getData(RequestContextSlot.IAST) >> ctx + } + + void 'onStringConcatFactory'() { + given: + final taintedObjects = ctx.getTaintedObjects() + args = args.collect { it -> + final item = addFromTaintFormat(taintedObjects, it) + objectHolder.add(item) + return item + } + final recipe = args.collect { '\u0001' }.join() + final recipeOffsets = (0..1<==', '==>2<=='] | '==>1<====>2<==' + ['==>1<==', '==>2<==', '==>3<=='] | '==>1<====>2<==3' + } + + void 'onString'() { + given: + final result = getStringFromTaintFormat(expected) + 
objectHolder.add(expected) + + and: + final taintedObjects = ctx.getTaintedObjects() + final fromTaintedDelimiter = addFromTaintFormat(taintedObjects, delimiter) + objectHolder.add(fromTaintedDelimiter) + + and: + final fromTaintedElements = new CharSequence[elements.size()] + elements.eachWithIndex { element, i -> + def el = addFromTaintFormat(taintedObjects, element) + objectHolder.add(el) + fromTaintedElements[i] = el + } + + when: + module.onStringJoin(result, fromTaintedDelimiter, fromTaintedElements) + + then: + final to = ctx.getTaintedObjects().get(result) + to != null + to.get() == result + taintFormat(to.get() as String, to.getRanges()) == expected + + where: + delimiter | elements | expected + "" | ['==>1<==', '==>2<=='] | '==>1<====>2<==' + "" | ['==>1<==', '==>2<==', '==>3<=='] | '==>1<====>2<==3' + "==>,<==" | ['1', '2', '3'] | '1==>,<==2==>,<==3' + "==>,<==" | ['1', '2', '3', '4'] | '1==>,<==2==>,<==3,4' + "==>,<==" | ['==>1<==', '==>2<==', '==>3<=='] | '==>1<====>,<==23' + } + + void 'onStringRepeat'() { + given: + final taintedObjects = ctx.getTaintedObjects() + self = addFromTaintFormat(taintedObjects, self) + objectHolder.add(self) + + and: + final result = getStringFromTaintFormat(expected) + objectHolder.add(expected) + + when: + module.onStringRepeat(self, count, result) + + then: + final to = ctx.getTaintedObjects().get(result) + to != null + to.get() == result + taintFormat(to.get() as String, to.getRanges()) == expected + + where: + self | count | expected + "==>b<==" | 2 | "==>b<====>b<==" + "==>b<==" | 3 | "==>b<====>b<==b" + "aa==>b<==" | 2 | "aa==>b<==aa==>b<==" + "aa==>b<==" | 3 | "aa==>b<==aa==>b<==aab" + "==>b<==cc" | 2 | "==>b<==cc==>b<==ccbcc" + "a==>b<==c" | 2 | "a==>b<==ca==>b<==c" + "a==>b<==c" | 3 | "a==>b<==ca==>b<==cabc" + } + + void 'onStringFormat'() { + given: + final to = ctx.getTaintedObjects() + final format = addFromTaintFormat(to, formatTainted) + final args = argsTainted.collect { + final value = taint(to, it) + objectHolder.add(value) + return value + } + final formatted = String.format(format, args as Object[]) + final expected = getStringFromTaintFormat(expectedTainted) + assert expected == formatted // validate expectation is OK + + when: + module.onStringFormat(format, args as Object[], formatted) + + then: + final tainted = to.get(formatted) + final formattedResult = taintFormat(formatted, tainted?.ranges) + assert formattedResult == expectedTainted: tainted?.ranges + + where: + formatTainted | argsTainted | expectedTainted + '%s%s' | ['==>1<==', '==>2<=='] | '==>1<====>2<==' + '%s%s%s' | ['==>1<==', '==>2<==', '==>3<=='] | '==>1<====>2<==3' + '==>%s<====>%s<==%s' | ['1', '2', '==>3<=='] | '==>1<====>2<==3' + '%s==>%s<====>%s<==' | ['1', '2', '==>3<=='] | '1==>2<====>3<==' + '%s%s==>%s<==' | ['==>1<==', '==>2<==', '3'] | '==>1<====>2<==3' + } + + void 'onStringFormat literals'() { + given: + final to = ctx.getTaintedObjects() + final args = argsTainted.collect { + final value = taint(to, it) + objectHolder.add(value) + return value + } + final expected = getStringFromTaintFormat(expectedTainted) + + when: + module.onStringFormat(literals, args as Object[], expected) + + then: + final tainted = to.get(expected) + final formattedResult = taintFormat(expected, tainted?.ranges) + assert formattedResult == expectedTainted: tainted?.ranges + + where: + literals | argsTainted | expectedTainted + ['', '', ''] | ['==>1<==', '==>2<=='] | '==>1<====>2<==' + ['', '', '', ''] | ['==>1<==', '==>2<==', '==>3<=='] | '==>1<====>2<==3' + } +} diff --git 
a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleTest.groovy index 11319301eaf..5e7dd245777 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/propagation/StringModuleTest.groovy @@ -908,6 +908,34 @@ class StringModuleTest extends IastModuleImplTestBase { 'He==>llo %s!<==' | ['W==>or<==ld'] | 'He==>llo <==W==>or<==ld==>!<==' // tainted placeholder (3) [mixing with tainted parameter] } + void 'onStringFormat literals: #literals args: #argsTainted'() { + given: + final to = ctx.getTaintedObjects() + final args = argsTainted.collect { + final value = taint(to, it) + objectHolder.add(value) + return value + } + final expected = getStringFromTaintFormat(expectedTainted) + + when: + module.onStringFormat(literals, args as Object[], expected) + + then: + final tainted = to.get(expected) + final formattedResult = taintFormat(expected, tainted?.ranges) + assert formattedResult == expectedTainted: tainted?.ranges + + where: + literals | argsTainted | expectedTainted + ['Hello World!'] | [] | 'Hello World!' + ['', ' ', ''] | ['Hello', 'World!'] | 'Hello World!' + ['', ' ', ''] | ['He==>ll<==o', 'World==>!<=='] | 'He==>ll<==o World==>!<==' + ['Hello World!'] | [] | 'Hello World!' + ['Today is ', ''] | [date('yyyy.MM.dd', '2012.11.23')] | "Today is ==>${String.valueOf(date('yyyy.MM.dd', '2012.11.23'))}<==" + ['', ''] | ['He==>ll<==o', 'World==>!<=='] | 'He==>ll<==o' // extra args + } + void 'onSplit'() { given: final to = ctx.getTaintedObjects() diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/CommandInjectionModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/CommandInjectionModuleTest.groovy index 9bc9148bb30..30b8adf5166 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/CommandInjectionModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/CommandInjectionModuleTest.groovy @@ -11,7 +11,7 @@ import datadog.trace.api.iast.sink.CommandInjectionModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan import groovy.transform.CompileDynamic -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.taintFormat @@ -27,7 +27,7 @@ class CommandInjectionModuleTest extends IastModuleImplTestBase { private AgentSpan span def setup() { - module = registerDependencies(new CommandInjectionModuleImpl()) + module = new CommandInjectionModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HstsMissingHeaderModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HstsMissingHeaderModuleTest.groovy new file mode 100644 index 00000000000..505776b3ea4 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HstsMissingHeaderModuleTest.groovy @@ -0,0 +1,170 @@ +package com.datadog.iast.sink + +import com.datadog.iast.IastModuleImplTestBase +import com.datadog.iast.IastRequestContext +import com.datadog.iast.RequestEndedHandler +import com.datadog.iast.model.Vulnerability +import 
com.datadog.iast.model.VulnerabilityType +import datadog.trace.api.gateway.Flow +import datadog.trace.api.gateway.IGSpanInfo +import datadog.trace.api.gateway.RequestContext +import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.InstrumentationBridge +import datadog.trace.api.internal.TraceSegment +import datadog.trace.bootstrap.instrumentation.api.AgentSpan + +class HstsMissingHeaderModuleTest extends IastModuleImplTestBase { + + private List objectHolder + + private IastRequestContext ctx + + private HstsMissingHeaderModuleImpl module + + private AgentSpan span + + def setup() { + InstrumentationBridge.clearIastModules() + module = new HstsMissingHeaderModuleImpl(dependencies) + InstrumentationBridge.registerIastModule(module) + objectHolder = [] + ctx = new IastRequestContext() + final reqCtx = Mock(RequestContext) { + getData(RequestContextSlot.IAST) >> ctx + } + span = Mock(AgentSpan) { + getSpanId() >> 123456 + getRequestContext() >> reqCtx + } + } + + + void 'hsts vulnerability'() { + given: + Vulnerability savedVul1 + final iastCtx = Mock(IastRequestContext) + iastCtx.getxForwardedProto() >> 'https' + iastCtx.getContentType() >> "text/html" + final handler = new RequestEndedHandler(dependencies) + final TraceSegment traceSegment = Mock(TraceSegment) + final reqCtx = Mock(RequestContext) + reqCtx.getTraceSegment() >> traceSegment + reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + final tags = Mock(Map) + tags.get("http.url") >> "https://localhost/a" + tags.get("http.status_code") >> 200i + final spanInfo = Mock(IGSpanInfo) + spanInfo.getTags() >> tags + IastRequestContext.get(span) >> iastCtx + + + when: + def flow = handler.apply(reqCtx, spanInfo) + + then: + flow.getAction() == Flow.Action.Noop.INSTANCE + flow.getResult() == null + 1 * reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + 1 * reqCtx.getTraceSegment() >> traceSegment + 1 * traceSegment.setTagTop("_dd.iast.enabled", 1) + 1 * iastCtx.getTaintedObjects() >> null + 1 * overheadController.releaseRequest() + 1 * spanInfo.getTags() >> tags + 1 * tags.get('http.url') >> "https://localhost/a" + 1 * tags.get('http.status_code') >> 200i + 1 * iastCtx.getStrictTransportSecurity() + 1 * tracer.activeSpan() >> span + 1 * iastCtx.getContentType() >> "text/html" + 1 * reporter.report(_, _ as Vulnerability) >> { + savedVul1 = it[1] + } + + with(savedVul1) { + type == VulnerabilityType.HSTS_HEADER_MISSING + } + } + + + void 'no hsts vulnerability reported'() { + given: + final iastCtx = Mock(IastRequestContext) + iastCtx.getxForwardedProto() >> 'https' + iastCtx.getContentType() >> "text/html" + final handler = new RequestEndedHandler(dependencies) + final TraceSegment traceSegment = Mock(TraceSegment) + final reqCtx = Mock(RequestContext) + reqCtx.getTraceSegment() >> traceSegment + reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + final tags = Mock(Map) + tags.get("http.url") >> url + tags.get("http.status_code") >> status + final spanInfo = Mock(IGSpanInfo) + spanInfo.getTags() >> tags + IastRequestContext.get(span) >> iastCtx + + + when: + def flow = handler.apply(reqCtx, spanInfo) + + then: + flow.getAction() == Flow.Action.Noop.INSTANCE + flow.getResult() == null + 1 * reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + 1 * reqCtx.getTraceSegment() >> traceSegment + 1 * traceSegment.setTagTop("_dd.iast.enabled", 1) + 1 * iastCtx.getTaintedObjects() >> null + 1 * overheadController.releaseRequest() + 1 * spanInfo.getTags() >> tags + 1 * tags.get('http.url') >> url + 1 * 
tags.get('http.status_code') >> status + 1 * iastCtx.getStrictTransportSecurity() + 0 * _ + + where: + url | status + "https://localhost/a" | 307i + "https://localhost/a" | HttpURLConnection.HTTP_MOVED_PERM + "https://localhost/a" | HttpURLConnection.HTTP_MOVED_TEMP + "https://localhost/a" | HttpURLConnection.HTTP_NOT_MODIFIED + "https://localhost/a" | HttpURLConnection.HTTP_NOT_FOUND + "https://localhost/a" | HttpURLConnection.HTTP_GONE + "https://localhost/a" | HttpURLConnection.HTTP_INTERNAL_ERROR + } + + + + void 'throw exception if context is null'(){ + when: + module.onRequestEnd(null, null) + + then: + thrown(NullPointerException) + } + + void 'exception not thrown if igSpanInfo is null'(){ + when: + module.onRequestEnd(ctx, null) + + then: + noExceptionThrown() + } + + void 'test max age'(){ + when: + final result = HstsMissingHeaderModuleImpl.isValidMaxAge(value) + + then: + result == expected + + where: + value | expected + "max-age=0" | false + "max-age=-1" | false + null | false + "" | false + "max-age-3" | false + "ramdom" | false + "max-age=10" | true + "max-age=0122344" | true + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HttpResponseHeaderModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HttpResponseHeaderModuleTest.groovy index 34e26d60e99..f3e30e1e180 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HttpResponseHeaderModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/HttpResponseHeaderModuleTest.groovy @@ -1,12 +1,15 @@ package com.datadog.iast.sink + import com.datadog.iast.IastModuleImplTestBase import com.datadog.iast.IastRequestContext import com.datadog.iast.model.Vulnerability import com.datadog.iast.model.VulnerabilityType +import com.datadog.iast.taint.TaintedObjects import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.iast.InstrumentationBridge +import datadog.trace.api.iast.telemetry.IastMetricCollector import datadog.trace.api.iast.util.Cookie import datadog.trace.bootstrap.instrumentation.api.AgentSpan @@ -22,10 +25,13 @@ class HttpResponseHeaderModuleTest extends IastModuleImplTestBase { def setup() { InstrumentationBridge.clearIastModules() - module = registerDependencies(new HttpResponseHeaderModuleImpl()) + module = new HttpResponseHeaderModuleImpl(dependencies) + InstrumentationBridge.registerIastModule(module) InstrumentationBridge.registerIastModule(new InsecureCookieModuleImpl()) InstrumentationBridge.registerIastModule(new NoHttpOnlyCookieModuleImpl()) InstrumentationBridge.registerIastModule(new NoSameSiteCookieModuleImpl()) + InstrumentationBridge.registerIastModule(new HstsMissingHeaderModuleImpl(dependencies)) + InstrumentationBridge.registerIastModule(new UnvalidatedRedirectModuleImpl(dependencies)) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { @@ -52,6 +58,7 @@ class HttpResponseHeaderModuleTest extends IastModuleImplTestBase { then: 1 * tracer.activeSpan() >> span 1 * span.getSpanId() + 1 * span.getServiceName() 1 * overheadController.consumeQuota(_, _) >> true 1 * reporter.report(_, _ as Vulnerability) >> { onReport.call(it[1] as Vulnerability) } 1 * reporter.report(_, _ as Vulnerability) >> { onReport.call(it[1] as Vulnerability) } @@ -81,10 +88,42 @@ class HttpResponseHeaderModuleTest extends IastModuleImplTestBase { void 'exercise onHeader'() { when: module.onHeader("Set-Cookie", "user-id=7") + 
module.onHeader("X-Content-Type-Options", "nosniff") + module.onHeader("Content-Type", "text/html") + module.onHeader("Strict-Transport-Security", "invalid max age") then: - 1 * tracer.activeSpan() + 4 * tracer.activeSpan() 1 * overheadController.consumeQuota(_,_) 0 * _ } + + void 'exercise IastRequestController'(){ + given: + final taintedObjects = Mock(TaintedObjects) + IastRequestContext ctx = new IastRequestContext(taintedObjects) + + when: + ctx.setxForwardedProto('https') + + then: + ctx.getxForwardedProto() == 'https' + } + + void 'exercise IastRequestContext'(){ + given: + final taintedObjects = Mock(TaintedObjects) + final iastMetricsCollector = Mock(IastMetricCollector) + + when: + IastRequestContext ctx = new IastRequestContext(taintedObjects, iastMetricsCollector) + ctx.setxForwardedProto('https') + ctx.setContentType("text/html") + ctx.setxContentTypeOptions('nosniff') + ctx.getxContentTypeOptions() + ctx.setStrictTransportSecurity('max-age=2345') + + then: + 0 * _ + } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/InsecureCookieModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/InsecureCookieModuleTest.groovy index bf74a709519..b160f37a28b 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/InsecureCookieModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/InsecureCookieModuleTest.groovy @@ -24,7 +24,7 @@ class InsecureCookieModuleTest extends IastModuleImplTestBase { def setup() { InstrumentationBridge.clearIastModules() - module = registerDependencies(new HttpResponseHeaderModuleImpl()) + module = new HttpResponseHeaderModuleImpl(dependencies) InstrumentationBridge.registerIastModule(new InsecureCookieModuleImpl()) objectHolder = [] ctx = new IastRequestContext() diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/LdapInjectionModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/LdapInjectionModuleTest.groovy index 2159276a958..682e8dfef39 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/LdapInjectionModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/LdapInjectionModuleTest.groovy @@ -11,7 +11,7 @@ import datadog.trace.api.iast.sink.LdapInjectionModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan import groovy.transform.CompileDynamic -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.taintFormat @@ -27,7 +27,7 @@ class LdapInjectionModuleTest extends IastModuleImplTestBase { private AgentSpan span def setup() { - module = registerDependencies(new LdapInjectionModuleImpl()) + module = new LdapInjectionModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoHttpCookieModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoHttpCookieModuleTest.groovy index 28fda1aeecb..a246f30ee9f 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoHttpCookieModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoHttpCookieModuleTest.groovy @@ -24,7 +24,7 @@ class NoHttpCookieModuleTest extends IastModuleImplTestBase { def setup() { 
InstrumentationBridge.clearIastModules() - module = registerDependencies(new HttpResponseHeaderModuleImpl()) + module = new HttpResponseHeaderModuleImpl(dependencies) InstrumentationBridge.registerIastModule(new NoHttpOnlyCookieModuleImpl()) objectHolder = [] ctx = new IastRequestContext() diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoSameSiteCookieModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoSameSiteCookieModuleTest.groovy index f4a97aaffa1..4e32a9b7722 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoSameSiteCookieModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/NoSameSiteCookieModuleTest.groovy @@ -24,7 +24,7 @@ class NoSameSiteCookieModuleTest extends IastModuleImplTestBase { def setup() { InstrumentationBridge.clearIastModules() - module = registerDependencies(new HttpResponseHeaderModuleImpl()) + module = new HttpResponseHeaderModuleImpl(dependencies) InstrumentationBridge.registerIastModule(new NoSameSiteCookieModuleImpl()) objectHolder = [] ctx = new IastRequestContext() diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/PathTraversalModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/PathTraversalModuleTest.groovy index d55e2975e7e..6c25122e393 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/PathTraversalModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/PathTraversalModuleTest.groovy @@ -10,7 +10,7 @@ import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.api.iast.sink.PathTraversalModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.fromTaintFormat import static com.datadog.iast.taint.TaintUtils.getStringFromTaintFormat @@ -27,7 +27,7 @@ class PathTraversalModuleTest extends IastModuleImplTestBase { private IastRequestContext ctx def setup() { - module = registerDependencies(new PathTraversalModuleImpl()) + module = new PathTraversalModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SqlInjectionModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SqlInjectionModuleTest.groovy index 3041c928a2a..69b30fe5ef3 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SqlInjectionModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SqlInjectionModuleTest.groovy @@ -10,7 +10,7 @@ import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.api.iast.sink.SqlInjectionModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.taintFormat import static datadog.trace.api.iast.sink.SqlInjectionModule.DATABASE_PARAMETER @@ -24,7 +24,7 @@ class SqlInjectionModuleTest extends IastModuleImplTestBase { private IastRequestContext ctx def setup() { - module = registerDependencies(new SqlInjectionModuleImpl()) + 
module = new SqlInjectionModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SsrfModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SsrfModuleTest.groovy index 6884da0e616..c379cea4b8f 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SsrfModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/SsrfModuleTest.groovy @@ -6,6 +6,7 @@ import com.datadog.iast.model.Range import com.datadog.iast.model.Source import com.datadog.iast.model.Vulnerability import com.datadog.iast.model.VulnerabilityType +import com.datadog.iast.taint.Ranges import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.iast.SourceTypes @@ -24,7 +25,7 @@ class SsrfModuleTest extends IastModuleImplTestBase { private AgentSpan span def setup() { - module = registerDependencies(new SsrfModuleImpl()) + module = new SsrfModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { @@ -82,6 +83,6 @@ class SsrfModuleTest extends IastModuleImplTestBase { } private void taint(final Object value) { - ctx.getTaintedObjects().taintInputObject(value, new Source(SourceTypes.REQUEST_PARAMETER_VALUE, 'name', value.toString())) + ctx.getTaintedObjects().taint(value, Ranges.forObject(new Source(SourceTypes.REQUEST_PARAMETER_VALUE, 'name', value.toString()))) } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/TrustBoundaryViolationModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/TrustBoundaryViolationModuleTest.groovy index c8120abab73..a3fce16d195 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/TrustBoundaryViolationModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/TrustBoundaryViolationModuleTest.groovy @@ -5,11 +5,13 @@ import com.datadog.iast.IastRequestContext import com.datadog.iast.model.Source import com.datadog.iast.model.Vulnerability import com.datadog.iast.model.VulnerabilityType +import com.datadog.iast.taint.Ranges import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.iast.InstrumentationBridge import datadog.trace.api.iast.SourceTypes import datadog.trace.bootstrap.instrumentation.api.AgentSpan +import foo.bar.VisitableClass class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { private List objectHolder @@ -23,7 +25,7 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { def setup() { InstrumentationBridge.clearIastModules() - module = registerDependencies(new TrustBoundaryViolationModuleImpl()) + module = new TrustBoundaryViolationModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { @@ -35,11 +37,31 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { } } + void 'report TrustBoundary vulnerability without context'() { + when: + module.onSessionValue('test', null) + + then: + 1 * tracer.activeSpan() >> null + 0 * overheadController.consumeQuota(_, _) + 0 * reporter._ + } + + void 'report TrustBoundary vulnerability for null value'() { + when: + module.onSessionValue('test', null) + + then: + 1 * tracer.activeSpan() >> span + 0 * overheadController.consumeQuota(_, _) + 0 * 
reporter._ + } + void 'report TrustBoundary vulnerability for tainted name'() { given: Vulnerability savedVul final name = "name" - ctx.getTaintedObjects().taintInputString(name, new Source(SourceTypes.NONE, null, null)) + ctx.getTaintedObjects().taint(name, Ranges.forCharSequence(name, new Source(SourceTypes.NONE, null, null))) when: module.onSessionValue(name, "value") @@ -56,7 +78,7 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { Vulnerability savedVul final name = "name" final badValue = "theValue" - ctx.getTaintedObjects().taintInputString(badValue, new Source(SourceTypes.NONE, null, null)) + ctx.getTaintedObjects().taint(badValue, Ranges.forCharSequence(badValue, new Source(SourceTypes.NONE, null, null))) when: module.onSessionValue(name, badValue) @@ -74,7 +96,7 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { Vulnerability savedVul final name = "name" final badValue = "badValue" - ctx.getTaintedObjects().taintInputString(badValue, new Source(SourceTypes.NONE, null, null)) + ctx.getTaintedObjects().taint(badValue, Ranges.forCharSequence(badValue, new Source(SourceTypes.NONE, null, null))) final values = ["A", "B", badValue] when: @@ -92,7 +114,7 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { Vulnerability savedVul final name = "name" final badValue = "badValue" - ctx.getTaintedObjects().taintInputString(badValue, new Source(SourceTypes.NONE, null, null)) + ctx.getTaintedObjects().taint(badValue, Ranges.forCharSequence(badValue, new Source(SourceTypes.NONE, null, null))) final values = new String[3] values[0] = "A" values[1] = "B" @@ -113,7 +135,7 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { Vulnerability savedVul final name = "name" final badValue = "badValue" - ctx.getTaintedObjects().taintInputString(badValue, new Source(SourceTypes.NONE, null, null)) + ctx.getTaintedObjects().taint(badValue, Ranges.forCharSequence(badValue, new Source(SourceTypes.NONE, null, null))) final values = new LinkedHashMap() values.put("A", "A") values.put("B", "B") @@ -129,6 +151,23 @@ class TrustBoundaryViolationModuleTest extends IastModuleImplTestBase { assertVulnerability(savedVul, badValue) } + void 'report TrustBoundary vulnerability for tainted value within custom class'() { + given: + Vulnerability savedVul + final name = "name" + final badValue = "badValue" + ctx.getTaintedObjects().taint(badValue, Ranges.forCharSequence(badValue, new Source(SourceTypes.NONE, null, null))) + final value = new VisitableClass(name: badValue) + + when: + module.onSessionValue(name, value) + + then: + 1 * tracer.activeSpan() >> span + 1 * overheadController.consumeQuota(_, _) >> true + 1 * reporter.report(_, _ as Vulnerability) >> { savedVul = it[1] } + assertVulnerability(savedVul, badValue) + } private static void assertVulnerability(final Vulnerability vuln, String expectedValue) { assert vuln != null diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/UnvalidatedRedirectModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/UnvalidatedRedirectModuleTest.groovy index fc22237fa09..5dabb12cc52 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/UnvalidatedRedirectModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/UnvalidatedRedirectModuleTest.groovy @@ -6,6 +6,7 @@ import com.datadog.iast.model.Range import com.datadog.iast.model.Source import com.datadog.iast.model.Vulnerability import 
com.datadog.iast.model.VulnerabilityType +import com.datadog.iast.taint.Ranges import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.iast.InstrumentationBridge @@ -14,7 +15,7 @@ import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.api.iast.sink.UnvalidatedRedirectModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.taintFormat @@ -27,7 +28,7 @@ class UnvalidatedRedirectModuleTest extends IastModuleImplTestBase { private IastRequestContext ctx def setup() { - module = registerDependencies(new UnvalidatedRedirectModuleImpl()) + module = new UnvalidatedRedirectModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { @@ -64,7 +65,7 @@ class UnvalidatedRedirectModuleTest extends IastModuleImplTestBase { void 'iast module detects URI redirect (#value)'(final URI value, final String expected) { setup: - ctx.taintedObjects.taintInputObject(value, new Source(SourceTypes.NONE, null, null)) + ctx.taintedObjects.taint(value, Ranges.forObject(new Source(SourceTypes.NONE, null, null))) when: module.onURIRedirect(value) @@ -107,7 +108,7 @@ class UnvalidatedRedirectModuleTest extends IastModuleImplTestBase { void 'if onHeader receives a Location header call onRedirect'() { setup: - final urm = Spy(UnvalidatedRedirectModuleImpl) + final urm = Spy(new UnvalidatedRedirectModuleImpl(dependencies)) InstrumentationBridge.registerIastModule(urm) when: @@ -157,6 +158,10 @@ class UnvalidatedRedirectModuleTest extends IastModuleImplTestBase { new Range(0, 2, new Source(SourceTypes.REQUEST_HEADER_VALUE, 'referer', 'value'), NOT_MARKED), new Range(4, 1, new Source(SourceTypes.REQUEST_PARAMETER_NAME, 'referer', 'value'), NOT_MARKED) ] + 'test03' | [ + new Range(0, 2, new Source(SourceTypes.REQUEST_HEADER_VALUE, null, null), NOT_MARKED), + new Range(4, 1, new Source(SourceTypes.REQUEST_PARAMETER_NAME, 'referer', 'value'), NOT_MARKED) + ] } void 'If all ranges from tainted element have unvalidated redirect mark vulnerability is not reported'() { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakCipherModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakCipherModuleTest.groovy index 3fec5882597..802df43ec28 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakCipherModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakCipherModuleTest.groovy @@ -12,7 +12,7 @@ class WeakCipherModuleTest extends IastModuleImplTestBase { private WeakCipherModule module def setup() { - module = registerDependencies(new WeakCipherModuleImpl()) + module = new WeakCipherModuleImpl(dependencies) } void 'iast module vulnerable cipher algorithm'(final String algorithm){ @@ -26,6 +26,7 @@ class WeakCipherModuleTest extends IastModuleImplTestBase { then: 1 * tracer.activeSpan() >> span 1 * span.getSpanId() >> spanId + 1 * span.getServiceName() 1 * overheadController.consumeQuota(_, _) >> true 1 * reporter.report(_, _) >> { args -> Vulnerability vuln = args[1] as Vulnerability diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakHashModuleTest.groovy 
b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakHashModuleTest.groovy index 0d84ab3b8a6..2cfebb744e0 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakHashModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakHashModuleTest.groovy @@ -12,7 +12,7 @@ class WeakHashModuleTest extends IastModuleImplTestBase { private WeakHashModule module def setup() { - module = registerDependencies(new WeakHashModuleImpl()) + module = new WeakHashModuleImpl(dependencies) } void 'iast module vulnerable hash algorithm'(final String algorithm){ @@ -26,6 +26,7 @@ class WeakHashModuleTest extends IastModuleImplTestBase { then: 1 * tracer.activeSpan() >> span 1 * span.getSpanId() >> spanId + 1 * span.getServiceName() 1 * overheadController.consumeQuota(_, _) >> true 1 * reporter.report(_, _) >> { args -> Vulnerability vuln = args[1] as Vulnerability diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakRandomnessModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakRandomnessModuleTest.groovy index 3b32d0afa88..fa3db3c8f19 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakRandomnessModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/WeakRandomnessModuleTest.groovy @@ -1,6 +1,7 @@ package com.datadog.iast.sink import com.datadog.iast.IastModuleImplTestBase +import com.datadog.iast.overhead.Operations import datadog.trace.api.iast.sink.WeakRandomnessModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan @@ -13,7 +14,7 @@ class WeakRandomnessModuleTest extends IastModuleImplTestBase { private AgentSpan span def setup() { - module = registerDependencies(new WeakRandomnessModuleImpl()) + module = new WeakRandomnessModuleImpl(dependencies) span = Mock(AgentSpan) { getSpanId() >> 123456 } @@ -28,6 +29,7 @@ class WeakRandomnessModuleTest extends IastModuleImplTestBase { 0 * _ } else { tracer.activeSpan() >> span + 1 * overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span) >> true 1 * reporter.report(span, _) } @@ -39,6 +41,7 @@ class WeakRandomnessModuleTest extends IastModuleImplTestBase { 0 * _ } else { tracer.activeSpan() >> null + 1 * overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, null) >> true 1 * reporter.report(_, _) } @@ -47,4 +50,14 @@ class WeakRandomnessModuleTest extends IastModuleImplTestBase { Random | false SecureRandom | true } + + void 'test nothing is reported if no quota available'() { + when: + module.onWeakRandom(Random) + + then: + tracer.activeSpan() >> span + 1 * overheadController.consumeQuota(Operations.REPORT_VULNERABILITY, span) >> false + 0 * _ + } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XContentTypeOptionsModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XContentTypeOptionsModuleTest.groovy new file mode 100644 index 00000000000..a8d377e968b --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XContentTypeOptionsModuleTest.groovy @@ -0,0 +1,140 @@ +package com.datadog.iast.sink + +import com.datadog.iast.IastModuleImplTestBase +import com.datadog.iast.IastRequestContext +import com.datadog.iast.RequestEndedHandler +import com.datadog.iast.model.Vulnerability +import com.datadog.iast.model.VulnerabilityType +import datadog.trace.api.gateway.Flow +import datadog.trace.api.gateway.IGSpanInfo +import datadog.trace.api.gateway.RequestContext 
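Both new header tests (HstsMissingHeaderModuleTest above and this XContentTypeOptionsModuleTest) drive the same RequestEndedHandler path: the handler reads http.url and http.status_code from the span tags and only reports for a 200 text/html response, on top of the header conditions specific to each module. The 'not reported' where-tables enumerate redirect, 304/404/410 and 5xx responses, which suggests a status-code guard roughly like the sketch below; the helper name and the exact condition are inferred from the test data, not taken from the module implementations:

    // Inferred from the where-tables: responses that never render as HTML in a browser are skipped.
    private static boolean isIgnorableResponse(final int statusCode) {
      return statusCode == HttpURLConnection.HTTP_MOVED_PERM ||    // 301
        statusCode == HttpURLConnection.HTTP_MOVED_TEMP ||         // 302
        statusCode == 307 ||
        statusCode == HttpURLConnection.HTTP_NOT_MODIFIED ||       // 304
        statusCode == HttpURLConnection.HTTP_NOT_FOUND ||          // 404
        statusCode == HttpURLConnection.HTTP_GONE ||               // 410
        statusCode == HttpURLConnection.HTTP_INTERNAL_ERROR        // 500
    }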
+import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.InstrumentationBridge +import datadog.trace.api.internal.TraceSegment +import datadog.trace.bootstrap.instrumentation.api.AgentSpan + +public class XContentTypeOptionsModuleTest extends IastModuleImplTestBase { + + private List objectHolder + + private IastRequestContext ctx + + private XContentTypeModuleImpl module + + private AgentSpan span + + def setup() { + InstrumentationBridge.clearIastModules() + module = new XContentTypeModuleImpl(dependencies) + InstrumentationBridge.registerIastModule(module) + objectHolder = [] + ctx = new IastRequestContext() + final reqCtx = Mock(RequestContext) { + getData(RequestContextSlot.IAST) >> ctx + } + span = Mock(AgentSpan) { + getSpanId() >> 123456 + getRequestContext() >> reqCtx + } + } + + + void 'x content options sniffing vulnerability'() { + given: + Vulnerability savedVul1 + final iastCtx = Mock(IastRequestContext) + iastCtx.getContentType() >> "text/html" + final handler = new RequestEndedHandler(dependencies) + final TraceSegment traceSegment = Mock(TraceSegment) + final reqCtx = Mock(RequestContext) + reqCtx.getTraceSegment() >> traceSegment + reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + final tags = Mock(Map) + tags.get("http.status_code") >> 200i + final spanInfo = Mock(IGSpanInfo) + spanInfo.getTags() >> tags + IastRequestContext.get(span) >> iastCtx + + + when: + def flow = handler.apply(reqCtx, spanInfo) + + then: + flow.getAction() == Flow.Action.Noop.INSTANCE + flow.getResult() == null + 1 * reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + 1 * reqCtx.getTraceSegment() >> traceSegment + 1 * traceSegment.setTagTop("_dd.iast.enabled", 1) + 1 * iastCtx.getTaintedObjects() >> null + 1 * overheadController.releaseRequest() + 1 * spanInfo.getTags() >> tags + 1 * tags.get('http.status_code') >> 200i + 1 * iastCtx.getxContentTypeOptions() >> null + 1 * tracer.activeSpan() >> span + 1 * iastCtx.getContentType() >> "text/html" + 1 * reporter.report(_, _ as Vulnerability) >> { + savedVul1 = it[1] + } + + with(savedVul1) { + type == VulnerabilityType.XCONTENTTYPE_HEADER_MISSING + } + } + + + void 'no x content options sniffing reported'() { + given: + final iastCtx = Mock(IastRequestContext) + iastCtx.getxForwardedProto() >> 'https' + iastCtx.getContentType() >> "text/html" + final handler = new RequestEndedHandler(dependencies) + final TraceSegment traceSegment = Mock(TraceSegment) + final reqCtx = Mock(RequestContext) + reqCtx.getTraceSegment() >> traceSegment + reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + final tags = Mock(Map) + tags.get("http.url") >> url + tags.get("http.status_code") >> status + final spanInfo = Mock(IGSpanInfo) + spanInfo.getTags() >> tags + IastRequestContext.get(span) >> iastCtx + + + when: + def flow = handler.apply(reqCtx, spanInfo) + + then: + flow.getAction() == Flow.Action.Noop.INSTANCE + flow.getResult() == null + 1 * reqCtx.getData(RequestContextSlot.IAST) >> iastCtx + 1 * reqCtx.getTraceSegment() >> traceSegment + 1 * traceSegment.setTagTop("_dd.iast.enabled", 1) + 1 * iastCtx.getTaintedObjects() >> null + 1 * overheadController.releaseRequest() + 1 * spanInfo.getTags() >> tags + 1 * iastCtx.getContentType() >> "text/html" + 1 * tags.get('http.status_code') >> status + 1 * iastCtx.getxContentTypeOptions() + 0 * _ + + where: + url | status + "https://localhost/a" | 307i + "https://localhost/a" | HttpURLConnection.HTTP_MOVED_PERM + "https://localhost/a" | HttpURLConnection.HTTP_MOVED_TEMP + 
"https://localhost/a" | HttpURLConnection.HTTP_NOT_MODIFIED + "https://localhost/a" | HttpURLConnection.HTTP_NOT_FOUND + "https://localhost/a" | HttpURLConnection.HTTP_GONE + "https://localhost/a" | HttpURLConnection.HTTP_INTERNAL_ERROR + } + + + + void 'throw exception if context is null'(){ + when: + module.onRequestEnd(null, null) + + then: + noExceptionThrown() + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XPathInjectionModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XPathInjectionModuleTest.groovy index 6a44afd10b8..3638b6f431b 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XPathInjectionModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XPathInjectionModuleTest.groovy @@ -10,7 +10,7 @@ import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.api.iast.sink.XPathInjectionModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.taintFormat @@ -23,7 +23,7 @@ class XPathInjectionModuleTest extends IastModuleImplTestBase { private IastRequestContext ctx def setup() { - module = registerDependencies(new XPathInjectionModuleImpl()) + module = new XPathInjectionModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XssModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XssModuleTest.groovy index 76ebed3b3a6..bbebbceae97 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XssModuleTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/sink/XssModuleTest.groovy @@ -5,6 +5,7 @@ import com.datadog.iast.IastRequestContext import com.datadog.iast.model.Source import com.datadog.iast.model.Vulnerability import com.datadog.iast.model.VulnerabilityType +import com.datadog.iast.taint.Ranges import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.iast.SourceTypes @@ -12,7 +13,7 @@ import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.api.iast.sink.XssModule import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED import static com.datadog.iast.taint.TaintUtils.addFromTaintFormat import static com.datadog.iast.taint.TaintUtils.taintFormat @@ -25,7 +26,7 @@ class XssModuleTest extends IastModuleImplTestBase { private IastRequestContext ctx def setup() { - module = registerDependencies(new XssModuleImpl()) + module = new XssModuleImpl(dependencies) objectHolder = [] ctx = new IastRequestContext() final reqCtx = Mock(RequestContext) { @@ -65,7 +66,7 @@ class XssModuleTest extends IastModuleImplTestBase { void 'module detects char[] XSS'() { setup: if (tainted) { - ctx.taintedObjects.taintInputObject(buf, new Source(SourceTypes.NONE, '', ''), mark) + ctx.taintedObjects.taint(buf, Ranges.forObject(new Source(SourceTypes.NONE, '', ''), mark)) } when: @@ -119,6 +120,54 @@ class XssModuleTest extends IastModuleImplTestBase { '/==>var<==' | ['a', 'b'] | VulnerabilityMarks.SQL_INJECTION_MARK | "/==>var<== a b" 
} + void 'module detects Charsequence XSS with file and line'() { + setup: + final param = mapTainted(s, mark) + + when: + module.onXss(param as CharSequence, file as String, line as int) + + then: + if (expected != null) { + 1 * reporter.report(_, _) >> { args -> assertEvidence(args[1] as Vulnerability, expected) } + } else { + 0 * reporter.report(_, _) + } + + where: + s | file | line | mark | expected + null | 'test' | 3 | NOT_MARKED | null + '/var' | 'test' | 3 | NOT_MARKED | null + '/==>var<=='| 'test' | 3 | NOT_MARKED | "/==>var<==" + '/==>var<=='| 'test' | 3 | VulnerabilityMarks.XSS_MARK | null + '/==>var<=='| 'test' | 3 | VulnerabilityMarks.SQL_INJECTION_MARK | "/==>var<==" + '/==>var<=='| null | 3 | VulnerabilityMarks.SQL_INJECTION_MARK | null + } + + void 'iast module detects String xss with class and method (#value)'() { + setup: + final param = mapTainted(value, mark) + final clazz = "class" + final method = "method" + + when: + module.onXss(param, clazz, method) + + then: + if (expected != null) { + 1 * reporter.report(_, _) >> { args -> assertEvidence(args[1] as Vulnerability, expected) } + } else { + 0 * reporter.report(_, _) + } + + where: + value | mark| expected + null | NOT_MARKED| null + '/var' | NOT_MARKED| null + '/==>var<==' | VulnerabilityMarks.XSS_MARK| null + '/==>var<==' | VulnerabilityMarks.SQL_INJECTION_MARK| "/==>var<==" + } + private String mapTainted(final String value, final int mark) { final result = addFromTaintFormat(ctx.taintedObjects, value, mark) diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/source/WebModuleTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/source/WebModuleTest.groovy deleted file mode 100644 index 1e93f5e3fb4..00000000000 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/source/WebModuleTest.groovy +++ /dev/null @@ -1,130 +0,0 @@ -package com.datadog.iast.source - -import com.datadog.iast.IastModuleImplTestBase -import com.datadog.iast.IastRequestContext -import com.datadog.iast.model.Source -import datadog.trace.api.gateway.RequestContext -import datadog.trace.api.gateway.RequestContextSlot -import datadog.trace.api.iast.SourceTypes -import datadog.trace.api.iast.source.WebModule -import datadog.trace.bootstrap.instrumentation.api.AgentSpan -import groovy.transform.CompileDynamic - -@CompileDynamic -class WebModuleTest extends IastModuleImplTestBase { - - private WebModule module - - def setup() { - module = new WebModuleImpl() - } - - void 'test #method: null or empty'() { - when: - module."$method"(*args) - - then: - 0 * _ - - where: - method | args - 'onParameterNames' | [null] - 'onParameterNames' | [[]] - 'onParameterValues' | [null, null] - 'onParameterValues' | ['', []] - 'onParameterValues' | [null, null as String[]] - 'onParameterValues' | ['', [] as String[]] - 'onParameterValues' | [[:]] - 'onHeaderNames' | [null] - 'onHeaderNames' | [[]] - 'onHeaderValues' | [null, null] - 'onHeaderValues' | ['', []] - 'onCookieNames' | [null] - 'onCookieNames' | [[]] - } - - void 'test #method: without span'() { - when: - module."$method"(*args) - - then: - 1 * tracer.activeSpan() >> null - 0 * _ - - where: - method | args - 'onParameterNames' | [['param']] - 'onParameterValues' | ['name', ['value']] - 'onParameterValues' | ['name', ['value'] as String[]] - 'onParameterValues' | [[name: ['value'] as String[]]] - 'onHeaderNames' | [['header']] - 'onHeaderValues' | ['name', ['value']] - 'onCookieNames' | [['name']] - 'onNamed' | ['name', ['v1'], (byte)0] - 'onNamed' | ['name', 
['v1'] as String[], (byte)0] - 'onNamed' | [[name: 'v1'], (byte)0] - } - - void 'onNamed #variant'() { - given: - final span = Mock(AgentSpan) - tracer.activeSpan() >> span - final reqCtx = Mock(RequestContext) - span.getRequestContext() >> reqCtx - final ctx = new IastRequestContext() - reqCtx.getData(RequestContextSlot.IAST) >> ctx - - when: - module.onNamed(*args, SourceTypes.REQUEST_PARAMETER_NAME) - - then: - 1 * tracer.activeSpan() >> span - 1 * span.getRequestContext() >> reqCtx - 1 * reqCtx.getData(RequestContextSlot.IAST) >> ctx - 0 * _ - def tos = ctx.taintedObjects - def to = tos.get('foo') - to.ranges.size() == 1 - to.ranges[0].start == 0 - to.ranges[0].length == 3 - to.ranges[0].source == new Source(SourceTypes.REQUEST_PARAMETER_NAME, 'var', 'foo') - - where: - variant | args - 'collection' | ['var', ['foo']] - 'array' | ['var', ['foo'] as String[]] - 'map' | [[var: ['foo'] as String[]]] - } - - void 'test #method'() { - given: - final span = Mock(AgentSpan) - tracer.activeSpan() >> span - final reqCtx = Mock(RequestContext) - span.getRequestContext() >> reqCtx - final ctx = new IastRequestContext() - reqCtx.getData(RequestContextSlot.IAST) >> ctx - - when: - module."$method"([name]) - - then: - 1 * tracer.activeSpan() >> span - 1 * span.getRequestContext() >> reqCtx - 1 * reqCtx.getData(RequestContextSlot.IAST) >> ctx - 0 * _ - def to = ctx.getTaintedObjects().get(name) - to != null - to.get() == name - to.ranges.size() == 1 - to.ranges[0].start == 0 - to.ranges[0].length == name.length() - to.ranges[0].source == new Source(source, name, name) - - where: - method | name | source - 'onParameterNames' | 'param' | SourceTypes.REQUEST_PARAMETER_NAME - 'onHeaderNames' | 'param' | SourceTypes.REQUEST_HEADER_NAME - 'onCookieNames' | 'param' | SourceTypes.REQUEST_COOKIE_NAME - } -} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/RangesTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/RangesTest.groovy index de86e5a4f90..60525afa7e2 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/RangesTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/RangesTest.groovy @@ -3,23 +3,27 @@ package com.datadog.iast.taint import com.datadog.iast.model.Range import com.datadog.iast.model.Source import com.datadog.iast.model.VulnerabilityType +import datadog.trace.api.Config import datadog.trace.api.iast.SourceTypes import datadog.trace.api.iast.VulnerabilityMarks import datadog.trace.test.util.DDSpecification -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED +import static com.datadog.iast.taint.Ranges.mergeRanges import static com.datadog.iast.taint.Ranges.rangesProviderFor +import static datadog.trace.api.iast.SourceTypes.REQUEST_HEADER_NAME class RangesTest extends DDSpecification { private static final int NEGATIVE_MARK = 1 << 31 + private static final int MAX_RANGE_COUNT = Config.get().iastMaxRangeCount void 'forString'() { given: final source = new Source(SourceTypes.NONE, null, null) when: - final result = Ranges.forString(s, source, VulnerabilityMarks.SQL_INJECTION_MARK) + final result = Ranges.forCharSequence(s, source, VulnerabilityMarks.SQL_INJECTION_MARK) then: result != null @@ -280,6 +284,29 @@ class RangesTest extends DDSpecification { NEGATIVE_MARK | _ } + void 'test merge ranges with limits'() { + given: + final leftRanges = (0.. rangeFor(index) } as Range[] + final rightRanges = (0.. 
rangeFor(index) } as Range[] + + when: + final merged = mergeRanges(offset, leftRanges, rightRanges) + + then: + merged.size() == expected + + where: + offset | left | right | expected + 0 | 1 | 1 | 2 + 0 | MAX_RANGE_COUNT | 1 | MAX_RANGE_COUNT + 0 | 1 | MAX_RANGE_COUNT | MAX_RANGE_COUNT + 0 | MAX_RANGE_COUNT | MAX_RANGE_COUNT | MAX_RANGE_COUNT + 10 | 1 | 1 | 2 + 10 | MAX_RANGE_COUNT | 1 | MAX_RANGE_COUNT + 10 | 1 | MAX_RANGE_COUNT | MAX_RANGE_COUNT + 10 | MAX_RANGE_COUNT | MAX_RANGE_COUNT | MAX_RANGE_COUNT + } + Range[] rangesFromSpec(List> spec) { def ranges = new Range[spec.size()] @@ -304,4 +331,8 @@ class RangesTest extends DDSpecification { getRanges() >> ranges } } + + Range rangeFor(final int index) { + return new Range(index, 1, new Source(REQUEST_HEADER_NAME, 'a', 'b'), NOT_MARKED) + } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintUtils.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintUtils.groovy index 7db1a75c673..88b21e9406b 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintUtils.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintUtils.groovy @@ -4,7 +4,7 @@ import com.datadog.iast.model.Range import com.datadog.iast.model.Source import datadog.trace.api.iast.SourceTypes -import static com.datadog.iast.model.Range.NOT_MARKED +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED class TaintUtils { @@ -76,7 +76,7 @@ class TaintUtils { if (value instanceof String) { return addFromTaintFormat(tos, value as String) } - tos.taintInputObject(value, new Source(SourceTypes.NONE, null, null)) + tos.taint(value, Ranges.forObject(new Source(SourceTypes.NONE, null, null))) return value } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectTest.groovy new file mode 100644 index 00000000000..d809f5b8b69 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectTest.groovy @@ -0,0 +1,28 @@ +package com.datadog.iast.taint + +import com.datadog.iast.model.Source +import datadog.trace.api.Config +import spock.lang.Specification +import com.datadog.iast.model.Range + +import java.lang.ref.ReferenceQueue + +import static datadog.trace.api.iast.VulnerabilityMarks.NOT_MARKED +import static datadog.trace.api.iast.SourceTypes.REQUEST_HEADER_NAME + +class TaintedObjectTest extends Specification { + + void 'test that tainted objects never go over the limit'() { + given: + final max = Config.get().iastMaxRangeCount + final ranges = (0..max + 1) + .collect { index -> new Range(index, 1, new Source(REQUEST_HEADER_NAME, 'a', 'b'), NOT_MARKED) } + + when: + final tainted = new TaintedObject('test', ranges.toArray(new Range[0]), new ReferenceQueue()) + + then: + ranges.size() > max + tainted.ranges.size() == max + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLazyTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLazyTest.groovy deleted file mode 100644 index 8ed664ff27b..00000000000 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLazyTest.groovy +++ /dev/null @@ -1,69 +0,0 @@ -package com.datadog.iast.taint - -import com.datadog.iast.IastModuleImplTestBase -import com.datadog.iast.IastRequestContext -import com.datadog.iast.model.Source -import 
datadog.trace.api.gateway.RequestContext -import datadog.trace.api.gateway.RequestContextSlot -import datadog.trace.bootstrap.instrumentation.api.AgentSpan - -class TaintedObjectsLazyTest extends IastModuleImplTestBase { - - private TaintedObjects delegate = Mock(TaintedObjects) - private IastRequestContext iastCtx - private RequestContext reqCtx - private AgentSpan span - - void setup() { - delegate = Mock(TaintedObjects) - iastCtx = Mock(IastRequestContext) - iastCtx.getTaintedObjects() >> delegate - reqCtx = Mock(RequestContext) - reqCtx.getData(RequestContextSlot.IAST) >> iastCtx - span = Mock(AgentSpan) - span.getRequestContext() >> reqCtx - } - - void 'get non lazy instance'() { - when: - final to = TaintedObjects.activeTaintedObjects() - - then: - 1 * tracer.activeSpan() >> span - !(to instanceof TaintedObjects.LazyTaintedObjects) - } - - void 'get lazy objects instance'() { - when: - final to = TaintedObjects.activeTaintedObjects(true) - - then: - to instanceof TaintedObjects.LazyTaintedObjects - - when: - to.&"$method".call(args as Object[]) - - then: 'first time the active tainted objects if fetched' - 1 * delegate._ - 1 * tracer.activeSpan() >> span - - when: - to.&"$method".call(args as Object[]) - - then: 'the active tainted objets is already fetched' - 1 * delegate._ - 0 * _ - - where: - method | args - 'getEstimatedSize' | [] - 'isFlat' | [] - 'taintInputString' | ['', new Source((byte) 0, null, null)] - 'taintInputObject' | ['', new Source((byte) 0, null, null)] - 'taint' | ['', Ranges.EMPTY] - 'get' | [''] - 'release' | [] - 'count' | [] - 'iterator' | [] - } -} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLogTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLogTest.groovy index 5ba53745d51..ea01efa3989 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLogTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsLogTest.groovy @@ -3,7 +3,6 @@ package com.datadog.iast.taint import ch.qos.logback.classic.Level import ch.qos.logback.classic.Logger import com.datadog.iast.IastSystem -import com.datadog.iast.model.Range import com.datadog.iast.model.Source import datadog.trace.api.iast.SourceTypes import datadog.trace.test.util.DDSpecification @@ -35,7 +34,7 @@ class TaintedObjectsLogTest extends DDSpecification { final value = "A" when: - def tainted = taintedObjects.taintInputString(value, new Source(SourceTypes.NONE, null, null)) + def tainted = taintedObjects.taint(value, Ranges.forCharSequence(value, new Source(SourceTypes.NONE, null, null))) then: noExceptionThrown() @@ -54,9 +53,8 @@ class TaintedObjectsLogTest extends DDSpecification { IastSystem.DEBUG = true logger.level = Level.ALL TaintedObjects taintedObjects = TaintedObjects.acquire() - taintedObjects.taint('A', [new Range(0, 1, new Source(SourceTypes.NONE, null, null), Range.NOT_MARKED)] as Range[]) - taintedObjects.taintInputString('B', new Source(SourceTypes.REQUEST_PARAMETER_NAME, 'test', 'value')) - taintedObjects.taintInputObject(new Date(), new Source(SourceTypes.REQUEST_HEADER_VALUE, 'test', 'value')) + final obj = 'A' + taintedObjects.taint(obj, Ranges.forCharSequence(obj, new Source(SourceTypes.NONE, null, null))) when: taintedObjects.release() @@ -64,5 +62,21 @@ class TaintedObjectsLogTest extends DDSpecification { then: noExceptionThrown() } + + void "test TaintedObjects api calls"() { + given: + IastSystem.DEBUG = true + logger.level = 
Level.ALL + TaintedObjects taintedObjects = TaintedObjects.acquire() + final obj = 'A' + + when: + taintedObjects.taint(obj, Ranges.forCharSequence(obj, new Source(SourceTypes.NONE, null, null))) + + then: + taintedObjects.size() == 1 + taintedObjects.iterator().size() == 1 + !taintedObjects.flat + } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsNoOpTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsNoOpTest.groovy new file mode 100644 index 00000000000..c8f1f91fe86 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/taint/TaintedObjectsNoOpTest.groovy @@ -0,0 +1,32 @@ +package com.datadog.iast.taint + +import com.datadog.iast.model.Source +import spock.lang.Specification + +class TaintedObjectsNoOpTest extends Specification { + + + void 'test no op implementation'() { + setup: + final instance = TaintedObjects.NoOp.INSTANCE + final toTaint = 'test' + + when: + final tainted = instance.taint(toTaint, Ranges.forCharSequence(toTaint, new Source(0 as byte, 'test', 'test'))) + + then: + tainted == null + instance.get(toTaint) == null + instance.count() == 0 + instance.estimatedSize == 0 + instance.size() == 0 + !instance.flat + !instance.iterator().hasNext() + + when: + instance.release() + + then: + noExceptionThrown() + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/AbstractTelemetryCallbackTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/AbstractTelemetryCallbackTest.groovy index aefaf6c46a1..92f03e910d2 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/AbstractTelemetryCallbackTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/AbstractTelemetryCallbackTest.groovy @@ -1,7 +1,7 @@ package com.datadog.iast.telemetry -import com.datadog.iast.HasDependencies.Dependencies +import com.datadog.iast.Dependencies import com.datadog.iast.IastModuleImplTestBase import datadog.trace.api.Config import datadog.trace.api.gateway.RequestContext diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestEndedHandlerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestEndedHandlerTest.groovy index 7c0571b4d0e..30a61959d98 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestEndedHandlerTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestEndedHandlerTest.groovy @@ -3,9 +3,11 @@ package com.datadog.iast.telemetry import com.datadog.iast.IastRequestContext import com.datadog.iast.RequestEndedHandler import com.datadog.iast.model.Source +import com.datadog.iast.taint.Ranges import com.datadog.iast.taint.TaintedObjects import com.datadog.iast.telemetry.taint.TaintedObjectsWithTelemetry import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.InstrumentationBridge import datadog.trace.api.iast.SourceTypes import datadog.trace.api.iast.VulnerabilityTypes import datadog.trace.api.iast.telemetry.IastMetric @@ -25,6 +27,7 @@ class TelemetryRequestEndedHandlerTest extends AbstractTelemetryCallbackTest { protected IastMetricCollector globalCollector void setup() { + InstrumentationBridge.clearIastModules() delegate = Spy(new RequestEndedHandler(dependencies)) final TaintedObjects to = TaintedObjectsWithTelemetry.build(Verbosity.DEBUG, TaintedObjects.acquire()) iastCtx = new 
IastRequestContext(to, new IastMetricCollector()) @@ -39,7 +42,7 @@ class TelemetryRequestEndedHandlerTest extends AbstractTelemetryCallbackTest { final handler = new TelemetryRequestEndedHandler(delegate) final toTaint = 'hello' final source = new Source(SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value') - iastCtx.taintedObjects.taintInputString(toTaint, source) + iastCtx.taintedObjects.taint(toTaint, Ranges.forCharSequence(toTaint, source)) when: handler.apply(reqCtx, span) @@ -61,15 +64,14 @@ class TelemetryRequestEndedHandlerTest extends AbstractTelemetryCallbackTest { given: final handler = new TelemetryRequestEndedHandler(delegate) final metric = TAINTED_FLAT_MODE - final tagValue = null when: - iastCtx.metricCollector.addMetric(metric, tagValue, 1) + iastCtx.metricCollector.addMetric(metric, (byte) -1, 1) handler.apply(reqCtx, span) then: 1 * delegate.apply(reqCtx, span) - 1 * traceSegment.setTagTop(String.format(TRACE_METRIC_PATTERN, getSpanTagValue(metric, tagValue)), 1) + 1 * traceSegment.setTagTop(String.format(TRACE_METRIC_PATTERN, getSpanTagValue(metric)), 1) when: globalCollector.prepareMetrics() @@ -109,32 +111,36 @@ class TelemetryRequestEndedHandlerTest extends AbstractTelemetryCallbackTest { where: metrics | description [ - metric(REQUEST_TAINTED, null, 123), - metric(EXECUTED_SOURCE, SourceTypes.REQUEST_PARAMETER_VALUE_STRING, 2), - metric(EXECUTED_SOURCE, SourceTypes.REQUEST_HEADER_VALUE_STRING, 4), + metric(REQUEST_TAINTED, 123), + metric(EXECUTED_SOURCE, SourceTypes.REQUEST_PARAMETER_VALUE, 2), + metric(EXECUTED_SOURCE, SourceTypes.REQUEST_HEADER_VALUE, 4), metric(EXECUTED_SINK, VulnerabilityTypes.SQL_INJECTION, 1), metric(EXECUTED_SINK, VulnerabilityTypes.COMMAND_INJECTION, 2), ] | 'List of only request scoped metrics' [ - metric(REQUEST_TAINTED, null, 123), - metric(INSTRUMENTED_SOURCE, SourceTypes.REQUEST_PARAMETER_VALUE_STRING, 2), + metric(REQUEST_TAINTED, 123), + metric(INSTRUMENTED_SOURCE, SourceTypes.REQUEST_PARAMETER_VALUE, 2), ] | 'Mix between global and request scoped metrics' } - private static String getSpanTagValue(final IastMetric metric, final String tagValue) { + private static String getSpanTagValue(final IastMetric metric, final Byte tagValue = null) { return metric.getTag() == null ? 
metric.getName() - : String.format("%s.%s", metric.getName(), tagValue.toLowerCase().replaceAll("\\.", "_")) + : String.format("%s.%s", metric.getName(), metric.tag.toString(tagValue).toLowerCase().replaceAll("\\.", "_")) } - private static Data metric(final IastMetric metric, final String tagValue, final int value) { + private static Data metric(final IastMetric metric, final byte tagValue, final int value) { return new Data(metric: metric, tagValue: tagValue, value: value) } + private static Data metric(final IastMetric metric, final int value) { + return new Data(metric: metric, tagValue: (byte) -1, value: value) + } + @ToString private static class Data { IastMetric metric - String tagValue + byte tagValue int value } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestStartedHandlerTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestStartedHandlerTest.groovy index 3f097768bb2..97d9565d7b5 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestStartedHandlerTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/TelemetryRequestStartedHandlerTest.groovy @@ -3,11 +3,13 @@ package com.datadog.iast.telemetry import com.datadog.iast.IastRequestContext import com.datadog.iast.telemetry.taint.TaintedObjectsWithTelemetry import datadog.trace.api.gateway.Flow +import datadog.trace.api.iast.telemetry.Verbosity class TelemetryRequestStartedHandlerTest extends AbstractTelemetryCallbackTest { void 'request started add the required collector'() { given: + injectSysConfig('dd.iast.telemetry.verbosity', verbosity.name()) final handler = new TelemetryRequestStartedHandler(dependencies) when: @@ -18,7 +20,14 @@ class TelemetryRequestStartedHandlerTest extends AbstractTelemetryCallbackTest { flow.getResult() instanceof IastRequestContext final iastCtx = flow.getResult() as IastRequestContext iastCtx.metricCollector != null - iastCtx.taintedObjects instanceof TaintedObjectsWithTelemetry + final withTelemetry = iastCtx.taintedObjects instanceof TaintedObjectsWithTelemetry + withTelemetry == taintedObjectsWithTelemetry 1 * dependencies.overheadController.acquireRequest() >> true + + where: + verbosity | taintedObjectsWithTelemetry + Verbosity.MANDATORY | false + Verbosity.INFORMATION | true + Verbosity.DEBUG | true } } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetryTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetryTest.groovy index 2ba948ade20..c20ca32dace 100644 --- a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetryTest.groovy +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/telemetry/taint/TaintedObjectsWithTelemetryTest.groovy @@ -2,13 +2,11 @@ package com.datadog.iast.telemetry.taint import com.datadog.iast.IastRequestContext import com.datadog.iast.model.Range -import com.datadog.iast.model.Source import com.datadog.iast.taint.Ranges import com.datadog.iast.taint.TaintedObject import com.datadog.iast.taint.TaintedObjects import datadog.trace.api.gateway.RequestContext import datadog.trace.api.gateway.RequestContextSlot -import datadog.trace.api.iast.SourceTypes import datadog.trace.api.iast.telemetry.IastMetric import datadog.trace.api.iast.telemetry.IastMetricCollector import datadog.trace.api.iast.telemetry.Verbosity @@ -60,7 +58,7 @@ class 
TaintedObjectsWithTelemetryTest extends DDSpecification { then: if (IastMetric.REQUEST_TAINTED.isEnabled(verbosity)) { - 1 * mockCollector.addMetric(IastMetric.REQUEST_TAINTED, null, tainteds.size()) + 1 * mockCollector.addMetric(IastMetric.REQUEST_TAINTED, _, tainteds.size()) } else { 0 * mockCollector.addMetric } @@ -74,13 +72,11 @@ class TaintedObjectsWithTelemetryTest extends DDSpecification { final taintedObjects = TaintedObjectsWithTelemetry.build(verbosity, Mock(TaintedObjects)) when: - taintedObjects.taintInputString('test', new Source(SourceTypes.REQUEST_PARAMETER_VALUE, 'name', 'value')) - taintedObjects.taintInputObject(new Date(), new Source(SourceTypes.REQUEST_HEADER_VALUE, 'name', 'value')) taintedObjects.taint('test', new Range[0]) then: if (IastMetric.EXECUTED_TAINTED.isEnabled(verbosity)) { - 3 * mockCollector.addMetric(IastMetric.EXECUTED_TAINTED, null, 1) // two calls with one element + 1 * mockCollector.addMetric(IastMetric.EXECUTED_TAINTED, _, 1) } else { 0 * mockCollector.addMetric } @@ -100,7 +96,7 @@ class TaintedObjectsWithTelemetryTest extends DDSpecification { then: if (IastMetric.TAINTED_FLAT_MODE.isEnabled(verbosity) && taintedObjects.isFlat()) { - 1 * mockCollector.addMetric(IastMetric.TAINTED_FLAT_MODE, null, _) + 1 * mockCollector.addMetric(IastMetric.TAINTED_FLAT_MODE, _, _) } else { 0 * mockCollector.addMetric } diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/util/HttpHeaderTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/util/HttpHeaderTest.groovy new file mode 100644 index 00000000000..971df3071d7 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/util/HttpHeaderTest.groovy @@ -0,0 +1,40 @@ +package com.datadog.iast.util + +import com.datadog.iast.IastRequestContext +import spock.lang.Specification + +import static com.datadog.iast.util.HttpHeader.Values.HEADERS + +class HttpHeaderTest extends Specification { + + + void 'test context headers'() { + setup: + final iastCtx = Spy(IastRequestContext) + + when: + final parsed = HttpHeader.from(header.name) + + then: + parsed != null + + when: + final matches = parsed.matches(header.name.toUpperCase(Locale.ROOT)) + + then: + matches + + when: + if (header instanceof HttpHeader.ContextAwareHeader) { + header.onHeader(iastCtx, "my_value") + } + + then: + if (header instanceof HttpHeader.ContextAwareHeader) { + 1 * iastCtx._ + } + + where: + header << HEADERS.values() + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/util/ObjectVisitorTest.groovy b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/util/ObjectVisitorTest.groovy new file mode 100644 index 00000000000..1f1c1eea745 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/com/datadog/iast/util/ObjectVisitorTest.groovy @@ -0,0 +1,118 @@ +package com.datadog.iast.util + +import foo.bar.VisitableClass +import spock.lang.Specification + +import static com.datadog.iast.util.ObjectVisitor.State.CONTINUE + +class ObjectVisitorTest extends Specification { + + void 'test visiting simple type'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final target = '123' + + when: + ObjectVisitor.visit(target, visitor) + + then: + 1 * visitor.visit('root', target) >> CONTINUE + 0 * _ + } + + void 'test visiting collection'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final target = ['1', '2', '3'] + + when: + ObjectVisitor.visit(target, visitor) + + then: + target.eachWithIndex { value, index -> + 1 * visitor.visit("root[$index]", 
value) >> CONTINUE + } + 0 * _ + } + + void 'test visiting iterable'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final wrapped = ['1', '2', '3'] + final target = new Iterable() { + @Override + Iterator iterator() { + return wrapped.iterator() + } + } + + when: + ObjectVisitor.visit(target, visitor) + + then: + wrapped.eachWithIndex { value, index -> + 1 * visitor.visit("root[$index]", value) >> CONTINUE + } + 0 * _ + } + + void 'test visiting map'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final target = ['a': 'b'] + + when: + ObjectVisitor.visit(target, visitor) + + then: + target.keySet().each { key -> + 1 * visitor.visit('root[]', key) >> CONTINUE + } + target.each { key, value -> + 1 * visitor.visit("root[$key]", value) >> CONTINUE + } + 0 * _ + } + + void 'test max depth'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final target = [a : [b: ['c', 'd']]] + + when: + ObjectVisitor.visit(target, visitor, 2, Integer.MAX_VALUE) + + then: + 1 * visitor.visit("root[]", 'a') >> CONTINUE + 1 * visitor.visit("root[a][]", 'b') >> CONTINUE + 0 * _ + } + + void 'test max objects'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final target = [1, 2, 3] + + when: 'we visit at most two objects' + ObjectVisitor.visit(target, visitor, Integer.MAX_VALUE, 2) + + then: 'we visit the array and its first element' + 1 * visitor.visit("root[0]", 1) >> CONTINUE + 0 * _ + } + + void 'test cycles'() { + given: + final visitor = Mock(ObjectVisitor.Visitor) + final target = new VisitableClass(name: 'cycle') + target.cycle = target + + when: 'we visit a class with a self reference' + ObjectVisitor.visit(target, visitor, Integer.MAX_VALUE, Integer.MAX_VALUE) + + then: 'we only visit the class once' + 1 * visitor.visit("root", target) >> CONTINUE + 1 * visitor.visit("root.name", 'cycle') >> CONTINUE + 0 * _ + } +} diff --git a/dd-java-agent/agent-iast/src/test/groovy/foo/bar/VisitableClass.groovy b/dd-java-agent/agent-iast/src/test/groovy/foo/bar/VisitableClass.groovy new file mode 100644 index 00000000000..6864ef5cf9f --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/groovy/foo/bar/VisitableClass.groovy @@ -0,0 +1,6 @@ +package foo.bar + +class VisitableClass { + String name + VisitableClass cycle +} diff --git a/dd-java-agent/agent-iast/src/test/proto/test2.proto b/dd-java-agent/agent-iast/src/test/proto/test2.proto new file mode 100644 index 00000000000..47993c2e8a9 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/proto/test2.proto @@ -0,0 +1,15 @@ +syntax = "proto2"; + +package com.datadog.iast.protobuf; + +message Proto2Child { + optional string optional = 1; + required string required = 2; + repeated string repeated = 3; + map map = 4; + +} + +message Proto2Parent { + required Proto2Child child = 1; +} diff --git a/dd-java-agent/agent-iast/src/test/proto/test3.proto b/dd-java-agent/agent-iast/src/test/proto/test3.proto new file mode 100644 index 00000000000..e1a5c3c1cf7 --- /dev/null +++ b/dd-java-agent/agent-iast/src/test/proto/test3.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package com.datadog.iast.protobuf; + +message Proto3Child { + optional string optional = 1; + string required = 2; + repeated string repeated = 3; + map map = 4; +} + +message Proto3Parent { + Proto3Child child = 1; +} diff --git a/dd-java-agent/agent-iast/src/test/resources/redaction/evidence-redaction-suite.yml b/dd-java-agent/agent-iast/src/test/resources/redaction/evidence-redaction-suite.yml index 734f02bf153..cf92538ae54 100644 --- 
a/dd-java-agent/agent-iast/src/test/resources/redaction/evidence-redaction-suite.yml +++ b/dd-java-agent/agent-iast/src/test/resources/redaction/evidence-redaction-suite.yml @@ -247,6 +247,38 @@ suite: } ] } + - type: 'VULNERABILITIES' + description: 'Query with single quoted string literal and null source' + input: > + [ + { + "type": "SQL_INJECTION", + "evidence": { + "value": "select * from users where username = 'user'", + "ranges": [ + { "start" : 38, "length" : 4, "source": { "origin": "http.request.body" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.body" } + ], + "vulnerabilities": [ + { + "type": "SQL_INJECTION", + "evidence": { + "valueParts": [ + { "value": "select * from users where username = '" }, + { "redacted": true, "source": 0, "pattern": "****" }, + { "value": "'" } + ] + } + } + ] + } - type: 'VULNERABILITIES' description: '$1 query with double quoted string literal $2' parameters: @@ -1605,3 +1637,389 @@ suite: } ] } + + - type: 'VULNERABILITIES' + description: 'SQLi exploited' + input: > + [ + { + "type": "SQL_INJECTION", + "evidence": { + "value": "SELECT * FROM Users WHERE email = '' OR TRUE --' AND password = '81dc9bdb52d04dc20036dbd8313ed055' AND deletedAt IS NULL", + "ranges": [ + { "start" : 35, "length" : 12, "source": { "origin": "http.request.parameter", "name": "email", "value": "' OR TRUE --" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "email", "value": "' OR TRUE --" } + ], + "vulnerabilities": [ + { + "type": "SQL_INJECTION", + "evidence": { + "valueParts": [ + { "value": "SELECT * FROM Users WHERE email = '" }, + { "redacted": true }, + { "source": 0, "value": "' OR TRUE --" }, + { "redacted": true } + ] + } + } + ] + } + - type: 'VULNERABILITIES' + description: 'Consecutive ranges - at the beginning' + input: > + [ + { + "type": "UNVALIDATED_REDIRECT", + "evidence": { + "value": "https://user:password@datadoghq.com:443/api/v1/test/123/?param1=pone¶m2=ptwo#fragment1=fone&fragment2=ftwo", + "ranges": [ + { "start" : 0, "length" : 4, "source": { "origin": "http.request.parameter", "name": "protocol", "value": "http" } }, + { "start" : 4, "length" : 1, "source": { "origin": "http.request.parameter", "name": "secure", "value": "s" } }, + { "start" : 22, "length" : 13, "source": { "origin": "http.request.parameter", "name": "host", "value": "datadoghq.com" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "protocol", "value": "http" }, + { "origin": "http.request.parameter", "name": "secure", "value": "s" }, + { "origin": "http.request.parameter", "name": "host", "value": "datadoghq.com" } + ], + "vulnerabilities": [ + { + "type": "UNVALIDATED_REDIRECT", + "evidence": { + "valueParts": [ + { "source": 0, "value": "http" }, + { "source": 1, "value": "s" }, + { "value": "://" }, + { "redacted": true }, + { "value": "@" }, + { "source": 2, "value": "datadoghq.com" }, + { "value": ":443/api/v1/test/123/?param1=" }, + { "redacted": true }, + { "value": "¶m2=" }, + { "redacted": true }, + { "value": "#fragment1=" }, + { "redacted": true }, + { "value": "&fragment2=" }, + { "redacted": true } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction ' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. 
This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { "start" : 123, "length" : 3, "source": { "origin": "http.request.parameter", "name": "type", "value": "XSS" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "type", "value": "XSS" } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "redacted": true }, + { "source": 0, "value": "XSS" }, + { "redacted": true } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction - with redactable source ' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { "start" : 123, "length" : 3, "source": { "origin": "http.request.parameter", "name": "password", "value": "XSS" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "password", "redacted": true, "pattern": "abc" } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "redacted": true }, + { "source": 0, "redacted": true, "pattern": "abc"}, + { "redacted": true } + ] + } + } + ] + } + + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction - with null source ' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { "start" : 123, "length" : 3, "source": { "origin": "http.request.body" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.body" } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "redacted": true }, + { "source": 0, "value": "XSS" }, + { "redacted": true } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction - multiple ranges' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { "start" : 16, "length" : 10, "source": { "origin": "http.request.parameter", "name": "text", "value": "super long" }}, + { "start" : 123, "length" : 3, "source": { "origin": "http.request.parameter", "name": "type", "value": "XSS" }} + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "text", "value": "super long" }, + { "origin": "http.request.parameter", "name": "type", "value": "XSS"} + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "redacted": true }, + { "source": 0, "value": "super long" }, + { "redacted": true }, + {"source": 1, "value": "XSS"}, + { "redacted": true } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction - first range at the beginning ' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. 
This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { "start" : 0, "length" : 4, "source": { "origin": "http.request.parameter", "name": "text", "value": "this" }}, + { "start" : 123, "length" : 3, "source": { "origin": "http.request.parameter", "name": "type", "value": "XSS" }} + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "text", "value": "this" }, + { "origin": "http.request.parameter", "name": "type", "value": "XSS"} + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "source": 0, "value": "this" }, + { "redacted": true }, + {"source": 1, "value": "XSS"}, + { "redacted": true } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction - last range at the end ' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS", + "ranges": [ + { "start" : 0, "length" : 4, "source": { "origin": "http.request.parameter", "name": "text", "value": "this" }}, + { "start" : 123, "length" : 3, "source": { "origin": "http.request.parameter", "name": "type", "value": "XSS" }} + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "text", "value": "this" }, + { "origin": "http.request.parameter", "name": "type", "value": "XSS"} + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "source": 0, "value": "this" }, + { "redacted": true }, + {"source": 1, "value": "XSS"} + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Tainted range based redaction - whole text ' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS", + "ranges": [ + { "start" : 0, "length" : 126, "source": { "origin": "http.request.parameter", "name": "text", "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS" }} + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "text", "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS"} + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "source": 0, "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS" } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'Redacted source that needs to be truncated' + input: > + [ + { + "type": "SQL_INJECTION", + "evidence": { + "value": "select * from users where username = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.Sed ut perspiciatis unde omnis iste natus error sit voluptatem ac'", + "ranges": [ + { "start" : 26, "length" : 523, "source": { "origin": "http.request.parameter", "name": "clause", "value": "username = 'Lorem%20ipsum%20dolor%20sit%20amet,%20consectetur%20adipiscing%20elit,%20sed%20do%20eiusmod%20tempor%20incididunt%20ut%20labore%20et%20dolore%20magna%20aliqua.%20Ut%20enim%20ad%20minim%20veniam,%20quis%20nostrud%20exercitation%20ullamco%20laboris%20nisi%20ut%20aliquip%20ex%20ea%20commodo%20consequat.%20Duis%20aute%20irure%20dolor%20in%20reprehenderit%20in%20voluptate%20velit%20esse%20cillum%20dolore%20eu%20fugiat%20nulla%20pariatur.%20Excepteur%20sint%20occaecat%20cupidatat%20non%20proident,%20sunt%20in%20culpa%20qui%20officia%20deserunt%20mollit%20anim%20id%20est%20laborum.Sed%20ut%20perspiciatis%20unde%20omnis%20iste%20natus%20error%20sit%20voluptatem%20ac'" } } + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "clause", "redacted": true, "pattern": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789ab", "truncated": "right"} + ], + "vulnerabilities": [ + { + "type": "SQL_INJECTION", + "evidence": { + "valueParts": [ + { "value": "select * from users where " }, + { "source": 0, "value": "username = '" }, + { "source": 0, "redacted": true, "truncated": "right", "pattern": "**********************************************************************************************************************************************************************************************************************************************************" }, + { "source": 0, "value": "'" } + ] + } + } + ] + } + + - type: 'VULNERABILITIES' + description: 'No redacted that needs to be truncated - whole text' + input: > + [ + { + "type": "XSS", + "evidence": { + "value": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.Sed ut perspiciatis unde omnis iste natus error sit voluptatem ac", + "ranges": [ + { "start" : 0, "length" : 510, "source": { "origin": "http.request.parameter", "name": "text", "value": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.Sed ut perspiciatis unde omnis iste natus error sit voluptatem ac" }} + ] + } + } + ] + expected: > + { + "sources": [ + { "origin": "http.request.parameter", "name": "text", "truncated": "right", "value": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure do"} + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { "source": 0, "truncated": "right", "value": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure do" } + ] + } + } + ] + } + diff --git a/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/IastRequestContextPreparationTrait.groovy b/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/IastRequestContextPreparationTrait.groovy index ffa956d5ded..6e0b849f6ca 100644 --- a/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/IastRequestContextPreparationTrait.groovy +++ b/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/IastRequestContextPreparationTrait.groovy @@ -3,16 +3,9 @@ package com.datadog.iast.test import com.datadog.iast.IastRequestContext import com.datadog.iast.IastSystem import com.datadog.iast.model.Range -import com.datadog.iast.model.Source import com.datadog.iast.taint.TaintedObject import com.datadog.iast.taint.TaintedObjects -import datadog.trace.api.gateway.CallbackProvider -import datadog.trace.api.gateway.EventType -import datadog.trace.api.gateway.Events -import datadog.trace.api.gateway.Flow -import datadog.trace.api.gateway.IGSpanInfo -import datadog.trace.api.gateway.RequestContext -import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.gateway.* import datadog.trace.api.iast.InstrumentationBridge import datadog.trace.bootstrap.instrumentation.api.AgentTracer import org.slf4j.Logger @@ -68,22 +61,12 @@ trait IastRequestContextPreparationTrait { List objects = Collections.synchronizedList([]) - TaintedObject taintInputString(String obj, Source source, int mark) { - objects << obj - this.delegate.taintInputString(obj, source, mark) - logTaint obj - } - - TaintedObject taintInputObject(Object obj, Source source, int mark) { - objects << obj - this.delegate.taintInputObject(obj, source, mark) - logTaint obj - } - + @Override TaintedObject taint(Object obj, Range[] ranges) { objects << obj - this.delegate.taintInputString(obj, ranges) + final tainted = this.delegate.taint(obj, ranges) logTaint obj + return tainted } private final static Logger LOGGER = LoggerFactory.getLogger("map tainted objects") diff --git a/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/NoopOverheadController.groovy b/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/NoopOverheadController.groovy index 74964a13634..32152663dc1 100644 --- a/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/NoopOverheadController.groovy +++ b/dd-java-agent/agent-iast/src/testFixtures/groovy/com/datadog/iast/test/NoopOverheadController.groovy @@ -2,6 +2,7 @@ package com.datadog.iast.test import com.datadog.iast.overhead.Operation import com.datadog.iast.overhead.OverheadController +import com.github.javaparser.quality.Nullable import datadog.trace.bootstrap.instrumentation.api.AgentSpan import groovy.transform.CompileStatic @@ -18,12 +19,12 @@ class NoopOverheadController implements OverheadController { } @Override - boolean hasQuota(Operation operation, AgentSpan span) { + boolean hasQuota(Operation operation, @Nullable AgentSpan span) { true } 
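Across these test changes the older TaintedObjects.taintInputString and taintInputObject entry points are consistently replaced by the single taint(Object, Range[]) call, with the ranges built through the Ranges helpers. A minimal sketch of that call-site migration, reusing only overloads that already appear in this diff (Ranges.forCharSequence(CharSequence, Source) and Ranges.forObject(Source)); it is illustrative only and not part of the patch:

  import com.datadog.iast.model.Source;
  import com.datadog.iast.taint.Ranges;
  import com.datadog.iast.taint.TaintedObjects;
  import datadog.trace.api.iast.SourceTypes;
  import java.util.Date;

  TaintedObjects taintedObjects = TaintedObjects.acquire();
  Source source = new Source(SourceTypes.REQUEST_PARAMETER_VALUE, "name", "value");
  // before: taintedObjects.taintInputString("value", source)
  taintedObjects.taint("value", Ranges.forCharSequence("value", source));
  // before: taintedObjects.taintInputObject(new Date(), source)
  taintedObjects.taint(new Date(), Ranges.forObject(source));

This is also the single entry point that IastRequestContextPreparationTrait now intercepts through its taint override.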
@Override - boolean consumeQuota(Operation operation, AgentSpan span) { + boolean consumeQuota(Operation operation, @Nullable AgentSpan span) { true } diff --git a/dd-java-agent/agent-jmxfetch/build.gradle b/dd-java-agent/agent-jmxfetch/build.gradle index 5049af66a9e..afb349e2342 100644 --- a/dd-java-agent/agent-jmxfetch/build.gradle +++ b/dd-java-agent/agent-jmxfetch/build.gradle @@ -11,10 +11,12 @@ plugins { apply from: "$rootDir/gradle/java.gradle" dependencies { - api('com.datadoghq:jmxfetch:0.47.9') { + api('com.datadoghq:jmxfetch:0.47.10') { exclude group: 'org.slf4j', module: 'slf4j-api' exclude group: 'org.slf4j', module: 'slf4j-jdk14' exclude group: 'com.beust', module: 'jcommander' + exclude group: 'com.fasterxml.jackson.core', module: 'jackson-core' + exclude group: 'com.fasterxml.jackson.jr', module: 'jackson-jr-objects' } api deps.slf4j api project(':internal-api') diff --git a/dd-java-agent/agent-jmxfetch/integrations-core b/dd-java-agent/agent-jmxfetch/integrations-core index bb8520bc2a8..03aed80d105 160000 --- a/dd-java-agent/agent-jmxfetch/integrations-core +++ b/dd-java-agent/agent-jmxfetch/integrations-core @@ -1 +1 @@ -Subproject commit bb8520bc2a877a2d1fbe640bd0526378ec20e8bc +Subproject commit 03aed80d105aa81b047e74c6da086165cac5ff6f diff --git a/dd-java-agent/agent-jmxfetch/src/main/java/com/fasterxml/jackson/core/JsonProcessingException.java b/dd-java-agent/agent-jmxfetch/src/main/java/com/fasterxml/jackson/core/JsonProcessingException.java new file mode 100644 index 00000000000..4ceefbc5622 --- /dev/null +++ b/dd-java-agent/agent-jmxfetch/src/main/java/com/fasterxml/jackson/core/JsonProcessingException.java @@ -0,0 +1,4 @@ +package com.fasterxml.jackson.core; + +// empty stub; here to satisfy a catch reference in org.datadog.jmxfetch.App +public class JsonProcessingException extends java.io.IOException {} diff --git a/dd-java-agent/agent-jmxfetch/src/main/java/datadog/trace/agent/jmxfetch/JMXFetch.java b/dd-java-agent/agent-jmxfetch/src/main/java/datadog/trace/agent/jmxfetch/JMXFetch.java index e4b6b171afc..5f905fe5585 100644 --- a/dd-java-agent/agent-jmxfetch/src/main/java/datadog/trace/agent/jmxfetch/JMXFetch.java +++ b/dd-java-agent/agent-jmxfetch/src/main/java/datadog/trace/agent/jmxfetch/JMXFetch.java @@ -95,7 +95,6 @@ private static void run(final StatsDClientManager statsDClientManager, final Con // App should be run as daemon otherwise CLI apps would not exit once main method exits. 
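The daemon comment above relies on standard JVM thread semantics: the JVM only exits once all non-daemon threads have finished, so the embedded JMXFetch worker must run as a daemon thread or short-lived CLI apps would never terminate. A generic illustration of that behaviour (plain Java, not dd-trace-java code):

  Thread collector = new Thread(() -> {
    // periodically poll JMX beans; runs until the JVM shuts down
    while (true) { /* collect metrics and sleep */ }
  });
  collector.setDaemon(true); // daemon: does not keep the JVM alive once main() returns
  collector.start();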
.daemon(true) .embedded(true) - .exitWatcher(new TraceConfigExitWatcher()) .confdDirectory(jmxFetchConfigDir) .yamlFileList(jmxFetchConfigs) .targetDirectInstances(true) @@ -132,7 +131,7 @@ public void run() { if (!appConfig.getExitWatcher().shouldExit()) { try { final int result = app.run(); - log.error("jmx collector exited with result: " + result); + log.error("jmx collector exited with result: {}", result); } catch (final Exception e) { log.error("Exception in jmx collector thread", e); } diff --git a/dd-java-agent/agent-logging/src/main/java/datadog/trace/logging/ddlogger/DDLoggerFactory.java b/dd-java-agent/agent-logging/src/main/java/datadog/trace/logging/ddlogger/DDLoggerFactory.java index afa838ae20a..1958c015f65 100644 --- a/dd-java-agent/agent-logging/src/main/java/datadog/trace/logging/ddlogger/DDLoggerFactory.java +++ b/dd-java-agent/agent-logging/src/main/java/datadog/trace/logging/ddlogger/DDLoggerFactory.java @@ -75,4 +75,9 @@ private LoggerHelperFactory getHelperFactory() { } return factory; } + + @Override + public void reinitialize() { + helperFactory = null; + } } diff --git a/dd-java-agent/agent-profiling/profiling-controller-ddprof/build.gradle b/dd-java-agent/agent-profiling/profiling-controller-ddprof/build.gradle index 49302e0ad0c..855c5b68bca 100644 --- a/dd-java-agent/agent-profiling/profiling-controller-ddprof/build.gradle +++ b/dd-java-agent/agent-profiling/profiling-controller-ddprof/build.gradle @@ -6,6 +6,7 @@ ext { skipSettingTestJavaVersion = true // need access to jdk.jfr package skipSettingCompilerRelease = true + excludeJdk = ['SEMERU11', 'SEMERU17'] } apply from: "$rootDir/gradle/java.gradle" diff --git a/dd-java-agent/agent-profiling/profiling-controller-jfr/build.gradle b/dd-java-agent/agent-profiling/profiling-controller-jfr/build.gradle index 36047c0e6f0..5dc47d1c0f6 100644 --- a/dd-java-agent/agent-profiling/profiling-controller-jfr/build.gradle +++ b/dd-java-agent/agent-profiling/profiling-controller-jfr/build.gradle @@ -6,6 +6,8 @@ ext { // need access to jdk.jfr package skipSettingCompilerRelease = true + + excludeJdk = ['SEMERU11', 'SEMERU17'] } apply from: "$rootDir/gradle/java.gradle" diff --git a/dd-java-agent/agent-profiling/profiling-controller-jfr/src/main/resources/jfr/dd.jfp b/dd-java-agent/agent-profiling/profiling-controller-jfr/src/main/resources/jfr/dd.jfp index debc9e78413..f5e10fa9c38 100644 --- a/dd-java-agent/agent-profiling/profiling-controller-jfr/src/main/resources/jfr/dd.jfp +++ b/dd-java-agent/agent-profiling/profiling-controller-jfr/src/main/resources/jfr/dd.jfp @@ -232,6 +232,8 @@ jdk.ActiveRecording#enabled=true jdk.ActiveSetting#enabled=true jdk.DataLoss#enabled=true jdk.DumpReason#enabled=true +jdk.ZAllocationStall#enabled=true +jdk.ZAllocationStall#threshold=10 ms jdk.ZPageAllocation#enabled=true jdk.ZPageAllocation#threshold=10 ms jdk.ZThreadPhase#enabled=true diff --git a/dd-java-agent/agent-profiling/profiling-controller-openjdk/build.gradle b/dd-java-agent/agent-profiling/profiling-controller-openjdk/build.gradle index facdd7fb8c7..2f561175283 100644 --- a/dd-java-agent/agent-profiling/profiling-controller-openjdk/build.gradle +++ b/dd-java-agent/agent-profiling/profiling-controller-openjdk/build.gradle @@ -3,6 +3,7 @@ ext { minJavaVersionForTests = JavaVersion.VERSION_11 // Zulu has backported profiling support forceJdk = ['ZULU8'] + excludeJdk = ['SEMERU11', 'SEMERU17'] // By default tests with be compiled for `minJavaVersionForTests` version, // but in this case we would like to avoid this since we would like 
to run with ZULU8 skipSettingTestJavaVersion = true @@ -16,6 +17,9 @@ apply plugin: 'idea' dependencies { api deps.slf4j api project(':internal-api') + api(project(':dd-java-agent:agent-bootstrap')) { + exclude group: 'com.datadoghq', module: 'agent-logging' + } api project(':dd-java-agent:agent-profiling:profiling-auxiliary') api project(':dd-java-agent:agent-profiling:profiling-controller') api project(':dd-java-agent:agent-profiling:profiling-controller-jfr') diff --git a/dd-java-agent/agent-profiling/profiling-controller-openjdk/src/main/java/com/datadog/profiling/controller/openjdk/OpenJdkController.java b/dd-java-agent/agent-profiling/profiling-controller-openjdk/src/main/java/com/datadog/profiling/controller/openjdk/OpenJdkController.java index 1f576bac76e..541ad63d14e 100644 --- a/dd-java-agent/agent-profiling/profiling-controller-openjdk/src/main/java/com/datadog/profiling/controller/openjdk/OpenJdkController.java +++ b/dd-java-agent/agent-profiling/profiling-controller-openjdk/src/main/java/com/datadog/profiling/controller/openjdk/OpenJdkController.java @@ -16,7 +16,10 @@ package com.datadog.profiling.controller.openjdk; import static com.datadog.profiling.controller.ProfilingSupport.*; +import static com.datadog.profiling.controller.ProfilingSupport.isObjectCountParallelized; import static datadog.trace.api.Platform.isJavaVersionAtLeast; +import static datadog.trace.api.config.ProfilingConfig.PROFILING_HEAP_HISTOGRAM_ENABLED; +import static datadog.trace.api.config.ProfilingConfig.PROFILING_HEAP_HISTOGRAM_ENABLED_DEFAULT; import static datadog.trace.api.config.ProfilingConfig.PROFILING_ULTRA_MINIMAL; import com.datadog.profiling.controller.ConfigurationException; @@ -27,6 +30,7 @@ import datadog.trace.api.Platform; import datadog.trace.api.config.ProfilingConfig; import datadog.trace.bootstrap.config.provider.ConfigProvider; +import datadog.trace.bootstrap.instrumentation.jfr.exceptions.ExceptionProfiling; import de.thetaphi.forbiddenapis.SuppressForbidden; import java.io.IOException; import java.time.Duration; @@ -99,6 +103,16 @@ public OpenJdkController(final ConfigProvider configProvider) disableEvent(recordingSettings, "jdk.FileWrite", EXPENSIVE_ON_CURRENT_JVM); } + if (configProvider.getBoolean( + PROFILING_HEAP_HISTOGRAM_ENABLED, PROFILING_HEAP_HISTOGRAM_ENABLED_DEFAULT)) { + if (!isObjectCountParallelized()) { + log.warn( + "enabling Datadog heap histogram on JVM without an efficient implementation of the jdk.ObjectCount event. " + + "This may increase p99 latency. 
Consider upgrading to JDK 17.0.9+ or 21+ to reduce latency impact."); + } + enableEvent(recordingSettings, "jdk.ObjectCount", "user enabled histogram heap collection"); + } + // Toggle settings from override file try { @@ -169,6 +183,10 @@ && isEventEnabled(recordingSettings, "jdk.NativeMethodSample")) { this.recordingSettings = Collections.unmodifiableMap(recordingSettings); + if (isEventEnabled(this.recordingSettings, "datadog.ExceptionSample")) { + ExceptionProfiling.getInstance().start(); + } + // Register periodic events AvailableProcessorCoresEvent.register(); } diff --git a/dd-java-agent/agent-profiling/profiling-controller/src/main/java/com/datadog/profiling/controller/ProfilingSupport.java b/dd-java-agent/agent-profiling/profiling-controller/src/main/java/com/datadog/profiling/controller/ProfilingSupport.java index c53e8130947..5e04651f115 100644 --- a/dd-java-agent/agent-profiling/profiling-controller/src/main/java/com/datadog/profiling/controller/ProfilingSupport.java +++ b/dd-java-agent/agent-profiling/profiling-controller/src/main/java/com/datadog/profiling/controller/ProfilingSupport.java @@ -26,6 +26,12 @@ public static boolean isObjectAllocationSampleAvailable() { return isJavaVersionAtLeast(16); } + public static boolean isObjectCountParallelized() { + // parallelized jdk.ObjectCount implemented in JDK21 and backported to JDK17 + // https://bugs.openjdk.org/browse/JDK-8307348 + return (isJavaVersion(17) && isJavaVersionAtLeast(17, 0, 9)) || isJavaVersionAtLeast(21); + } + public static boolean isNativeMethodSampleAvailable() { if (isOracleJDK8()) { return false; diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfiler.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfiler.java index 58ac043f327..bd8d4a37b24 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfiler.java +++ b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfiler.java @@ -14,11 +14,12 @@ import static com.datadog.profiling.ddprof.DatadogProfilerConfig.getWallInterval; import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isAllocationProfilingEnabled; import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isCpuProfilerEnabled; -import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isJmethodIdCacheEnabled; import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isLiveHeapSizeTrackingEnabled; import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isMemoryLeakProfilingEnabled; +import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isResourceNameContextAttributeEnabled; import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isSpanNameContextAttributeEnabled; import static com.datadog.profiling.ddprof.DatadogProfilerConfig.isWallClockProfilerEnabled; +import static com.datadog.profiling.ddprof.DatadogProfilerConfig.omitLineNumbers; import static com.datadog.profiling.utils.ProfilingMode.ALLOCATION; import static com.datadog.profiling.utils.ProfilingMode.CPU; import static com.datadog.profiling.utils.ProfilingMode.MEMLEAK; @@ -58,6 +59,7 @@ public final class DatadogProfiler { private static final int[] EMPTY = new int[0]; private static final String OPERATION = "_dd.trace.operation"; + private static final String RESOURCE = "_dd.trace.resource"; private static final int MAX_NUM_ENDPOINTS = 8192; @@ -144,6 +146,7 @@ 
private DatadogProfiler(ConfigProvider configProvider) throws UnsupportedEnviron try { profiler = JavaProfiler.getInstance( + configProvider.getString(ProfilingConfig.PROFILING_DATADOG_PROFILER_LIBPATH), configProvider.getString( ProfilingConfig.PROFILING_DATADOG_PROFILER_SCRATCH, ProfilingConfig.PROFILING_DATADOG_PROFILER_SCRATCH_DEFAULT)); @@ -172,6 +175,9 @@ private DatadogProfiler(ConfigProvider configProvider) throws UnsupportedEnviron if (isSpanNameContextAttributeEnabled(configProvider)) { orderedContextAttributes.add(OPERATION); } + if (isResourceNameContextAttributeEnabled(configProvider)) { + orderedContextAttributes.add(RESOURCE); + } this.contextSetter = new ContextSetter(profiler, orderedContextAttributes); this.queueTimeThreshold = configProvider.getLong( @@ -305,6 +311,9 @@ String cmdStartProfiling(Path file) throws IllegalStateException { cmd.append(",cstack=").append(getCStack(configProvider)); cmd.append(",safemode=").append(getSafeMode(configProvider)); cmd.append(",attributes=").append(String.join(";", orderedContextAttributes)); + if (omitLineNumbers(configProvider)) { + cmd.append(",linenumbers=f"); + } if (profilingModes.contains(CPU)) { // cpu profiling is enabled. String schedulingEvent = getSchedulingEvent(configProvider); @@ -343,10 +352,6 @@ String cmdStartProfiling(Path file) throws IllegalStateException { cmd.append(isLiveHeapSizeTrackingEnabled(configProvider) ? 'L' : 'l'); } } - if (isJmethodIdCacheEnabled()) { - // element retention is 30 chunk rotations, will be checked only if >10000 elements in cache - cmd.append(",minfocache=30:1000"); - } String cmdString = cmd.toString(); log.debug("Datadog profiler command line: {}", cmdString); return cmdString; @@ -367,6 +372,10 @@ public int operationNameOffset() { return offsetOf(OPERATION); } + public int resourceNameOffset() { + return offsetOf(RESOURCE); + } + public int offsetOf(String attribute) { return contextSetter.offsetOf(attribute); } diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerConfig.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerConfig.java index e4470e70f55..f45be14f8ac 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerConfig.java +++ b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerConfig.java @@ -3,6 +3,7 @@ import static datadog.trace.api.Platform.isJ9; import static datadog.trace.api.config.ProfilingConfig.PROFILING_ALLOCATION_ENABLED; import static datadog.trace.api.config.ProfilingConfig.PROFILING_CONTEXT_ATTRIBUTES; +import static datadog.trace.api.config.ProfilingConfig.PROFILING_CONTEXT_ATTRIBUTES_RESOURCE_NAME_ENABLED; import static datadog.trace.api.config.ProfilingConfig.PROFILING_CONTEXT_ATTRIBUTES_SPAN_NAME_ENABLED; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_ALLOC_ENABLED; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_ALLOC_ENABLED_DEFAULT; @@ -15,6 +16,8 @@ import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_CSTACK; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_CSTACK_DEFAULT; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_LIBPATH; +import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_LINE_NUMBERS; +import static 
datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_LINE_NUMBERS_DEFAULT; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_LIVEHEAP_CAPACITY; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_LIVEHEAP_CAPACITY_DEFAULT; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_LIVEHEAP_ENABLED; @@ -40,8 +43,6 @@ import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_WALL_ENABLED; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_WALL_INTERVAL; import static datadog.trace.api.config.ProfilingConfig.PROFILING_DATADOG_PROFILER_WALL_INTERVAL_DEFAULT; -import static datadog.trace.api.config.ProfilingConfig.PROFILING_JMETHODID_CACHE_ENABLED; -import static datadog.trace.api.config.ProfilingConfig.PROFILING_JMETHODID_CACHE_ENABLED_DEFAULT; import static datadog.trace.api.config.ProfilingConfig.PROFILING_QUEUEING_TIME_ENABLED; import static datadog.trace.api.config.ProfilingConfig.PROFILING_QUEUEING_TIME_ENABLED_DEFAULT; import static datadog.trace.api.config.ProfilingConfig.PROFILING_ULTRA_MINIMAL; @@ -108,8 +109,8 @@ public static boolean isWallClockProfilerEnabled(ConfigProvider configProvider) boolean isUltraMinimal = getBoolean(configProvider, PROFILING_ULTRA_MINIMAL, false); boolean isTracingEnabled = configProvider.getBoolean(TRACE_ENABLED, true); boolean disableUnlessOptedIn = isUltraMinimal || !isTracingEnabled || isJ9(); - return getBoolean( - configProvider, PROFILING_DATADOG_PROFILER_WALL_ENABLED, disableUnlessOptedIn); + boolean enabledByDefault = !disableUnlessOptedIn; + return getBoolean(configProvider, PROFILING_DATADOG_PROFILER_WALL_ENABLED, enabledByDefault); } public static int getWallInterval(ConfigProvider configProvider) { @@ -246,6 +247,13 @@ public static String getCStack() { return getCStack(ConfigProvider.getInstance()); } + public static boolean omitLineNumbers(ConfigProvider configProvider) { + return !getBoolean( + configProvider, + PROFILING_DATADOG_PROFILER_LINE_NUMBERS, + PROFILING_DATADOG_PROFILER_LINE_NUMBERS_DEFAULT); + } + private static int clamp(int min, int max, int value) { return Math.max(min, Math.min(max, value)); } @@ -282,13 +290,8 @@ public static boolean isSpanNameContextAttributeEnabled(ConfigProvider configPro return configProvider.getBoolean(PROFILING_CONTEXT_ATTRIBUTES_SPAN_NAME_ENABLED, true); } - public static boolean isJmethodIdCacheEnabled() { - return isJmethodIdCacheEnabled(ConfigProvider.getInstance()); - } - - public static boolean isJmethodIdCacheEnabled(ConfigProvider configProvider) { - return configProvider.getBoolean( - PROFILING_JMETHODID_CACHE_ENABLED, PROFILING_JMETHODID_CACHE_ENABLED_DEFAULT); + public static boolean isResourceNameContextAttributeEnabled(ConfigProvider configProvider) { + return configProvider.getBoolean(PROFILING_CONTEXT_ATTRIBUTES_RESOURCE_NAME_ENABLED, true); } public static String getString(ConfigProvider configProvider, String key, String defaultValue) { diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerContextSetter.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerContextSetter.java index 40a656ceb65..58c9fbfd9cb 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerContextSetter.java +++ 
b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilerContextSetter.java @@ -1,8 +1,7 @@ package com.datadog.profiling.ddprof; -import datadog.trace.api.experimental.ProfilingContextSetter; - -public class DatadogProfilerContextSetter implements ProfilingContextSetter { +public class DatadogProfilerContextSetter + implements datadog.trace.api.profiling.ProfilingContextAttribute { private final int offset; private final DatadogProfiler profiler; @@ -12,12 +11,10 @@ public DatadogProfilerContextSetter(String attribute, DatadogProfiler profiler) this.profiler = profiler; } - @Override public void set(CharSequence value) { profiler.setContextValue(offset, value); } - @Override public void clear() { profiler.clearContextValue(offset); } diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingIntegration.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingIntegration.java index 608fdd02cc5..6810a590419 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingIntegration.java +++ b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingIntegration.java @@ -1,7 +1,7 @@ package com.datadog.profiling.ddprof; -import datadog.trace.api.experimental.ProfilingContextSetter; -import datadog.trace.api.experimental.ProfilingScope; +import datadog.trace.api.profiling.ProfilingContextAttribute; +import datadog.trace.api.profiling.ProfilingScope; import datadog.trace.bootstrap.instrumentation.api.ProfilerContext; import datadog.trace.bootstrap.instrumentation.api.ProfilingContextIntegration; @@ -13,6 +13,7 @@ public class DatadogProfilingIntegration implements ProfilingContextIntegration private static final DatadogProfiler DDPROF = DatadogProfiler.getInstance(); private static final int SPAN_NAME_INDEX = DDPROF.operationNameOffset(); + private static final int RESOURCE_NAME_INDEX = DDPROF.resourceNameOffset(); private static final boolean WALLCLOCK_ENABLED = DatadogProfilerConfig.isWallClockProfilerEnabled(); @@ -42,22 +43,14 @@ public int encode(CharSequence constant) { public void setContext(ProfilerContext profilerContext) { DDPROF.setSpanContext(profilerContext.getSpanId(), profilerContext.getRootSpanId()); DDPROF.setContextValue(SPAN_NAME_INDEX, profilerContext.getEncodedOperationName()); + DDPROF.setContextValue(RESOURCE_NAME_INDEX, profilerContext.getEncodedResourceName()); } @Override public void clearContext() { DDPROF.clearSpanContext(); DDPROF.clearContextValue(SPAN_NAME_INDEX); - } - - @Override - public void setContextValue(String attribute, String value) { - DDPROF.setContextValue(attribute, value); - } - - @Override - public void clearContextValue(String attribute) { - DDPROF.clearContextValue(attribute); + DDPROF.clearContextValue(RESOURCE_NAME_INDEX); } @Override @@ -66,7 +59,7 @@ public void setContext(long rootSpanId, long spanId) { } @Override - public ProfilingContextSetter createContextSetter(String attribute) { + public ProfilingContextAttribute createContextAttribute(String attribute) { return new DatadogProfilerContextSetter(attribute, DDPROF); } diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingScope.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingScope.java index 
5abf7562a29..ccb7c72af65 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingScope.java +++ b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/DatadogProfilingScope.java @@ -1,6 +1,7 @@ package com.datadog.profiling.ddprof; -import datadog.trace.api.experimental.ProfilingScope; +import datadog.trace.api.profiling.ProfilingContextAttribute; +import datadog.trace.api.profiling.ProfilingScope; public class DatadogProfilingScope implements ProfilingScope { private final DatadogProfiler profiler; @@ -16,11 +17,25 @@ public void setContextValue(String attribute, String value) { profiler.setContextValue(attribute, value); } + @Override + public void setContextValue(ProfilingContextAttribute attribute, String value) { + if (attribute instanceof DatadogProfilerContextSetter) { + ((DatadogProfilerContextSetter) attribute).set(value); + } + } + @Override public void clearContextValue(String attribute) { profiler.clearContextValue(attribute); } + @Override + public void clearContextValue(ProfilingContextAttribute attribute) { + if (attribute instanceof DatadogProfilerContextSetter) { + ((DatadogProfilerContextSetter) attribute).clear(); + } + } + @Override public void close() { for (int i = 0; i < snapshot.length; i++) { diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/QueueTimeTracker.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/QueueTimeTracker.java index b04cf869398..cf7ac164347 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/QueueTimeTracker.java +++ b/dd-java-agent/agent-profiling/profiling-ddprof/src/main/java/com/datadog/profiling/ddprof/QueueTimeTracker.java @@ -2,6 +2,8 @@ import com.datadoghq.profiler.JavaProfiler; import datadog.trace.api.profiling.QueueTiming; +import datadog.trace.bootstrap.instrumentation.api.TaskWrapper; +import java.lang.ref.WeakReference; public class QueueTimeTracker implements QueueTiming { @@ -9,7 +11,7 @@ public class QueueTimeTracker implements QueueTiming { private final Thread origin; private final long threshold; private final long startTicks; - private Class task; + private WeakReference weakTask; private Class scheduler; public QueueTimeTracker(JavaProfiler profiler, long threshold) { @@ -21,8 +23,8 @@ public QueueTimeTracker(JavaProfiler profiler, long threshold) { } @Override - public void setTask(Class task) { - this.task = task; + public void setTask(Object task) { + this.weakTask = new WeakReference<>(task); } @Override @@ -32,11 +34,19 @@ public void setScheduler(Class scheduler) { @Override public void close() { - assert task != null && scheduler != null; - // potentually avoidable JNI call - long endTicks = profiler.getCurrentTicks(); - if (profiler.isThresholdExceeded(threshold, startTicks, endTicks)) { - profiler.recordQueueTime(startTicks, endTicks, task, scheduler, origin); + assert weakTask != null && scheduler != null; + Object task = this.weakTask.get(); + if (task != null) { + // potentially avoidable JNI call + long endTicks = profiler.getCurrentTicks(); + if (profiler.isThresholdExceeded(threshold, startTicks, endTicks)) { + // note: because this type traversal can update secondary_super_cache (see JDK-8180450) + // we avoid doing this unless we are absolutely certain we will record the event + Class taskType = TaskWrapper.getUnwrappedType(task); + if (taskType != null) { + 
profiler.recordQueueTime(startTicks, endTicks, taskType, scheduler, origin); + } + } } } } diff --git a/dd-java-agent/agent-profiling/profiling-ddprof/src/test/java/com/datadog/profiling/ddprof/DatadogProfilerTest.java b/dd-java-agent/agent-profiling/profiling-ddprof/src/test/java/com/datadog/profiling/ddprof/DatadogProfilerTest.java index 12c262349b1..67531996499 100644 --- a/dd-java-agent/agent-profiling/profiling-ddprof/src/test/java/com/datadog/profiling/ddprof/DatadogProfilerTest.java +++ b/dd-java-agent/agent-profiling/profiling-ddprof/src/test/java/com/datadog/profiling/ddprof/DatadogProfilerTest.java @@ -10,8 +10,7 @@ import com.datadog.profiling.controller.UnsupportedEnvironmentException; import com.datadog.profiling.utils.ProfilingMode; import datadog.trace.api.config.ProfilingConfig; -import datadog.trace.api.experimental.ProfilingContextSetter; -import datadog.trace.api.experimental.ProfilingScope; +import datadog.trace.api.profiling.ProfilingScope; import datadog.trace.bootstrap.config.provider.ConfigProvider; import java.nio.file.Path; import java.nio.file.Paths; @@ -116,8 +115,8 @@ public void testContextRegistration() throws UnsupportedEnvironmentException { assertTrue(profiler.setContextValue("foo", "xyz")); assertFalse(profiler.setContextValue("xyz", "foo")); - ProfilingContextSetter fooSetter = new DatadogProfilerContextSetter("foo", profiler); - ProfilingContextSetter barSetter = new DatadogProfilerContextSetter("bar", profiler); + DatadogProfilerContextSetter fooSetter = new DatadogProfilerContextSetter("foo", profiler); + DatadogProfilerContextSetter barSetter = new DatadogProfilerContextSetter("bar", profiler); int[] snapshot0 = profiler.snapshot(); try (ProfilingScope outer = new DatadogProfilingScope(profiler)) { fooSetter.set("foo0"); diff --git a/dd-java-agent/agent-profiling/src/main/java/com/datadog/profiling/agent/ProfilingAgent.java b/dd-java-agent/agent-profiling/src/main/java/com/datadog/profiling/agent/ProfilingAgent.java index 6d4bc6891db..3a0832f5c8e 100644 --- a/dd-java-agent/agent-profiling/src/main/java/com/datadog/profiling/agent/ProfilingAgent.java +++ b/dd-java-agent/agent-profiling/src/main/java/com/datadog/profiling/agent/ProfilingAgent.java @@ -159,7 +159,7 @@ public static synchronized void run(final boolean isStartingFirst, ClassLoader a log.warn(e.getMessage()); log.debug("", e); } catch (final ConfigurationException e) { - log.warn("Failed to initialize profiling agent! " + e.getMessage()); + log.warn("Failed to initialize profiling agent! 
{}", e.getMessage()); log.debug("Failed to initialize profiling agent!", e); } } diff --git a/dd-java-agent/agent-tooling/build.gradle b/dd-java-agent/agent-tooling/build.gradle index 29e3ffa1ec9..93e2e2f84ff 100644 --- a/dd-java-agent/agent-tooling/build.gradle +++ b/dd-java-agent/agent-tooling/build.gradle @@ -43,7 +43,6 @@ dependencies { exclude group: 'com.datadoghq', module: 'agent-logging' } api group: 'com.blogspot.mydailyjava', name: 'weak-lock-free', version: '0.17' - api group: 'com.googlecode.concurrentlinkedhashmap', name: 'concurrentlinkedhashmap-lru', version: '1.4.2' api deps.bytebuddy api deps.bytebuddyagent implementation group: 'net.java.dev.jna', name: 'jna', version: '5.8.0' diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/AgentCLI.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/AgentCLI.java index 59db1a00ded..85114370c4c 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/AgentCLI.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/AgentCLI.java @@ -128,7 +128,7 @@ private static void recursiveDependencySearch(Consumer invoker, File origi private static void unzipJar(Consumer invoker, File file) throws IOException { try (JarFile jar = new JarFile(file)) { - log.debug("Finding entries in file:" + file.getName()); + log.debug("Finding entries in file: {}", file.getName()); jar.stream() .forEach( diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/Instrumenter.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/Instrumenter.java index e8fcb87bd66..8642a55d4fe 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/Instrumenter.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/Instrumenter.java @@ -7,6 +7,7 @@ import static java.util.Collections.singletonList; import static net.bytebuddy.matcher.ElementMatchers.isSynthetic; +import datadog.trace.agent.tooling.iast.IastPostProcessorFactory; import datadog.trace.agent.tooling.muzzle.Reference; import datadog.trace.agent.tooling.muzzle.ReferenceMatcher; import datadog.trace.agent.tooling.muzzle.ReferenceProvider; @@ -24,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import net.bytebuddy.asm.Advice; import net.bytebuddy.asm.AsmVisitorWrapper; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.description.type.TypeDescription; @@ -117,6 +119,11 @@ interface WithTypeStructure { ElementMatcher structureMatcher(); } + /** Instrumentation that wants to apply additional structure checks after type matching. */ + interface WithPostProcessor { + Advice.PostProcessor.Factory postProcessor(); + } + /** Instrumentation that provides method advice. 
*/ interface HasAdvice { /** @@ -314,8 +321,9 @@ public boolean isApplicable(Set enabledSystems) { @Override public boolean isEnabled() { - return !ConfigProvider.getInstance() - .getBoolean(ProfilingConfig.PROFILING_ULTRA_MINIMAL, false); + return super.isEnabled() + && !ConfigProvider.getInstance() + .getBoolean(ProfilingConfig.PROFILING_ULTRA_MINIMAL, false); } } @@ -333,7 +341,7 @@ public boolean isApplicable(Set enabledSystems) { /** Parent class for all IAST related instrumentations */ @SuppressForbidden - abstract class Iast extends Default { + abstract class Iast extends Default implements WithPostProcessor { private static final Logger log = LoggerFactory.getLogger(Instrumenter.Iast.class); @@ -374,6 +382,11 @@ private void preloadClassNames() { public String[] getClassNamesToBePreloaded() { return null; } + + @Override + public Advice.PostProcessor.Factory postProcessor() { + return IastPostProcessorFactory.INSTANCE; + } } /** Parent class for all USM related instrumentations */ diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/KnownTypesIndex.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/KnownTypesIndex.java index 1c6afeb6841..60388260b30 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/KnownTypesIndex.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/KnownTypesIndex.java @@ -70,7 +70,7 @@ public static KnownTypesIndex readIndex() { } return new KnownTypesIndex(multipleIdTable, ClassNameTrie.readFrom(in)); } catch (Throwable e) { - log.error("Problem reading " + KNOWN_TYPES_INDEX_NAME, e); + log.error("Problem reading {}", KNOWN_TYPES_INDEX_NAME, e); } } return buildIndex(); // fallback to runtime generation when testing diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/ClassFileLocators.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/ClassFileLocators.java index fadf94de971..c7b2d026204 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/ClassFileLocators.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/ClassFileLocators.java @@ -101,6 +101,8 @@ static Resolution loadClassResource(ClassLoader classLoader, String resourceName private ClassFileLocators() {} public static final class LazyResolution implements Resolution { + private static final Boolean USE_URL_CACHES = InstrumenterConfig.get().isResolverUseUrlCaches(); + private final URL url; private byte[] bytecode; @@ -122,7 +124,9 @@ public byte[] resolve() { if (null == bytecode) { try { URLConnection uc = url.openConnection(); - uc.setUseCaches(false); + if (null != USE_URL_CACHES) { + uc.setUseCaches(USE_URL_CACHES); + } try (InputStream in = uc.getInputStream()) { bytecode = StreamDrainer.DEFAULT.drain(in); } diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/csi/CallSiteTransformer.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/csi/CallSiteTransformer.java index 2d517dbb21f..50848a30ebf 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/csi/CallSiteTransformer.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/csi/CallSiteTransformer.java @@ -161,6 +161,11 @@ public void instruction(final int opcode) { mv.visitInsn(opcode); } + @Override + public void instruction(final int 
opcode, final int parameter) { + mv.visitIntInsn(opcode, parameter); + } + @Override public void instruction(final int opcode, final String type) { mv.visitTypeInsn(opcode, type); diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableRedefinitionStrategyListener.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableRedefinitionStrategyListener.java new file mode 100644 index 00000000000..d8b8284146d --- /dev/null +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableRedefinitionStrategyListener.java @@ -0,0 +1,62 @@ +package datadog.trace.agent.tooling.bytebuddy.iast; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import javax.annotation.Nonnull; +import net.bytebuddy.agent.builder.AgentBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * {@link TaintableVisitor} redefines the structure of a class by adding interfaces and fields, + * meaning that it cannot be applied to already loaded classes. + * + *
<p>
This listener will disable the visitor to prevent a failure with the whole redefinition batch. + */ +public final class TaintableRedefinitionStrategyListener + extends AgentBuilder.RedefinitionStrategy.Listener.Adapter { + + private static final Logger LOGGER = + LoggerFactory.getLogger(TaintableRedefinitionStrategyListener.class); + private static final boolean DEBUG = LOGGER.isDebugEnabled(); + + public static final TaintableRedefinitionStrategyListener INSTANCE = + new TaintableRedefinitionStrategyListener(); + + private TaintableRedefinitionStrategyListener() {} + + @Override + @Nonnull + public Iterable>> onError( + final int index, + @Nonnull final List> batch, + @Nonnull final Throwable throwable, + @Nonnull final List> types) { + if (TaintableVisitor.ENABLED) { + if (DEBUG) { + LOGGER.debug( + "Exception while retransforming with the visitor in batch {}, disabling it", index); + } + TaintableVisitor.ENABLED = false; + return Collections.singletonList(batch); + } else { + if (DEBUG) { + LOGGER.debug( + "Exception while retransforming after disabling the visitor in batch {}, classes won't be instrumented", + index); + } + return Collections.emptyList(); + } + } + + @Override + public void onComplete( + final int amount, final List> types, final Map>, Throwable> failures) { + if (DEBUG) { + if (!TaintableVisitor.ENABLED) { + LOGGER.debug("Retransforming succeeded with a disabled visitor"); + } + } + } +} diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitor.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitor.java index 595b60a6e9b..1d644ba7580 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitor.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitor.java @@ -22,6 +22,7 @@ public class TaintableVisitor implements AsmVisitorWrapper { public static volatile boolean DEBUG = false; + static volatile boolean ENABLED = true; private static final String INTERFACE_NAME = "datadog/trace/api/iast/Taintable"; private static final String SOURCE_CLASS_NAME = "L" + INTERFACE_NAME + "$Source;"; @@ -55,9 +56,21 @@ public ClassVisitor wrap( final MethodList methods, final int writerFlags, final int readerFlags) { - return types.contains(instrumentedType.getName()) - ? new AddTaintableInterfaceVisitor(classVisitor) - : classVisitor; + if (ENABLED) { + return types.contains(instrumentedType.getName()) + ? 
new AddTaintableInterfaceVisitor(classVisitor) + : classVisitor; + } else { + return NoOp.INSTANCE.wrap( + instrumentedType, + classVisitor, + implementationContext, + typePool, + fields, + methods, + writerFlags, + readerFlags); + } } private static class AddTaintableInterfaceVisitor extends ClassVisitor { diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/HierarchyMatchers.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/HierarchyMatchers.java index 6826bd0f0d3..97b699e0803 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/HierarchyMatchers.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/HierarchyMatchers.java @@ -5,7 +5,7 @@ import datadog.trace.agent.tooling.bytebuddy.SharedTypePools; import de.thetaphi.forbiddenapis.SuppressForbidden; -import net.bytebuddy.description.DeclaredByType; +import net.bytebuddy.description.ByteCodeElement; import net.bytebuddy.description.NamedElement; import net.bytebuddy.description.annotation.AnnotationSource; import net.bytebuddy.description.field.FieldDescription; @@ -82,15 +82,17 @@ public static ElementMatcher.Junction declaresContextField( return SUPPLIER.declaresContextField(keyClassName, contextClassName); } + /** Use this to match annotated fields, methods, or method parameters. */ @SuppressForbidden - public static + public static > ElementMatcher.Junction isAnnotatedWith(NameMatchers.Named matcher) { SharedTypePools.annotationOfInterest(matcher.name); return ElementMatchers.isAnnotatedWith(matcher); } + /** Use this to match annotated fields, methods, or method parameters. */ @SuppressForbidden - public static + public static > ElementMatcher.Junction isAnnotatedWith(NameMatchers.OneOf matcher) { SharedTypePools.annotationsOfInterest(matcher.names); return ElementMatchers.isAnnotatedWith(matcher); diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/ScalaTraitMatchers.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/ScalaTraitMatchers.java new file mode 100644 index 00000000000..51464848f33 --- /dev/null +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/matcher/ScalaTraitMatchers.java @@ -0,0 +1,44 @@ +package datadog.trace.agent.tooling.bytebuddy.matcher; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.is; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.isStatic; +import static net.bytebuddy.matcher.ElementMatchers.not; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import net.bytebuddy.description.method.MethodDescription; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; + +public class ScalaTraitMatchers { + public static ElementMatcher.Junction isTraitMethod( + String traitName, String name, Object... 
argumentTypes) { + + ElementMatcher.Junction scalaOldArgs = + isStatic() + .and(takesArguments(argumentTypes.length + 1)) + .and(takesArgument(0, named(traitName))); + ElementMatcher.Junction scalaNewArgs = + not(isStatic()).and(takesArguments(argumentTypes.length)); + + for (int i = 0; i < argumentTypes.length; i++) { + Object argumentType = argumentTypes[i]; + ElementMatcher matcher; + if (argumentType instanceof ElementMatcher) { + matcher = (ElementMatcher) argumentType; + } else if (argumentType instanceof String) { + matcher = named((String) argumentType); + } else if (argumentType instanceof Class) { + matcher = is((Class) argumentType); + } else { + throw new IllegalArgumentException("Unexpected type for argument type specification"); + } + scalaOldArgs = scalaOldArgs.and(takesArgument(i + 1, matcher)); + scalaNewArgs = scalaNewArgs.and(takesArgument(i, matcher)); + } + + return isMethod().and(named(name)).and(scalaOldArgs.or(scalaNewArgs)); + } +} diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypeFactory.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypeFactory.java index ad9f291d8a2..cc0252bef51 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypeFactory.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypeFactory.java @@ -66,6 +66,9 @@ final class TypeFactory { } } + private static final boolean OUTLINING_ENABLED = + InstrumenterConfig.get().isResolverOutliningEnabled(); + private static final TypeParser outlineTypeParser = new OutlineTypeParser(); private static final TypeParser fullTypeParser = new FullTypeParser(); @@ -86,7 +89,7 @@ final class TypeFactory { boolean installing = false; - boolean createOutlines = true; + boolean createOutlines = OUTLINING_ENABLED; ClassLoader originalClassLoader; @@ -152,7 +155,7 @@ void enableFullDescriptions() { /** Temporarily turn off full description parsing; returns {@code true} if it was enabled. */ boolean disableFullDescriptions() { boolean wasEnabled = !createOutlines; - createOutlines = true; + createOutlines = OUTLINING_ENABLED; return wasEnabled; } @@ -171,7 +174,7 @@ void endTransform() { targetName = null; targetBytecode = null; - createOutlines = true; + createOutlines = OUTLINING_ENABLED; } private void clearReferences() { @@ -276,8 +279,8 @@ private TypeDescription loadType(String name, TypeParser typeParser) { Class loadedType; if (BOOTSTRAP_LOADER == classLoader) { loadedType = Class.forName(name, false, BOOTSTRAP_LOADER); - } else if (classLoader.getClass().getName().startsWith("groovy.lang.GroovyClassLoader")) { - return null; // avoid due to https://issues.apache.org/jira/browse/GROOVY-9742 + } else if (skipLoadClass(classLoader.getClass().getName())) { + return null; // avoid known problematic class-loaders } else { loadedType = classLoader.loadClass(name); } @@ -293,6 +296,12 @@ private TypeDescription loadType(String name, TypeParser typeParser) { } } + private static boolean skipLoadClass(String loaderClassName) { + // avoid due to https://issues.apache.org/jira/browse/GROOVY-9742 + return loaderClassName.startsWith("groovy.lang.GroovyClassLoader") + || loaderClassName.equals("org.apache.jasper.servlet.JasperLoader"); + } + /** Type description that begins with a name and provides more details on-demand. 
*/ final class LazyType extends WithName implements WithLocation { private ClassFileLocator.Resolution location; @@ -369,7 +378,7 @@ private TypeDescription outline() { return doResolve(true); } // temporarily switch to generating (fast) outlines as that's all we need - createOutlines = true; + createOutlines = OUTLINING_ENABLED; try { return doResolve(true); } finally { diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypePoolFacade.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypePoolFacade.java index e4882db4b83..b5b2e3b39a8 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypePoolFacade.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/outline/TypePoolFacade.java @@ -21,6 +21,7 @@ public static void registerAsSupplier() { @Override public TypePool typePool(ClassLoader classLoader) { + switchContext(classLoader); return INSTANCE; } diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/profiling/UnwrappingVisitor.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/profiling/UnwrappingVisitor.java index e1490e3c0d6..7d6b73d901c 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/profiling/UnwrappingVisitor.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/bytebuddy/profiling/UnwrappingVisitor.java @@ -58,7 +58,8 @@ public ClassVisitor wrap( private static class ImplementTaskWrapperClassVisitor extends ClassVisitor { - private static final String TASK_WRAPPER = "datadog/trace/bootstrap/TaskWrapper"; + private static final String TASK_WRAPPER = + "datadog/trace/bootstrap/instrumentation/api/TaskWrapper"; private final String className; private final String fieldName; diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/csi/CallSiteAdvice.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/csi/CallSiteAdvice.java index d00f58dd498..28c41fb820d 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/csi/CallSiteAdvice.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/csi/CallSiteAdvice.java @@ -9,6 +9,9 @@ interface MethodHandler { /** Executes an instruction without parameters */ void instruction(int opcode); + /** Executes an instruction with an int parameter */ + void instruction(final int opcode, final int parameter); + /** Executes an instruction with a type parameter */ void instruction(int opcode, String type); diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/iast/IastPostProcessorFactory.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/iast/IastPostProcessorFactory.java new file mode 100644 index 00000000000..2cfd9ab9ec5 --- /dev/null +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/iast/IastPostProcessorFactory.java @@ -0,0 +1,176 @@ +package datadog.trace.agent.tooling.iast; + +import static datadog.trace.api.iast.telemetry.IastMetric.EXECUTED_PROPAGATION; +import static datadog.trace.api.iast.telemetry.IastMetric.EXECUTED_SINK; +import static datadog.trace.api.iast.telemetry.IastMetric.EXECUTED_SOURCE; +import static datadog.trace.api.iast.telemetry.IastMetric.INSTRUMENTED_PROPAGATION; +import static 
datadog.trace.api.iast.telemetry.IastMetric.INSTRUMENTED_SINK; +import static datadog.trace.api.iast.telemetry.IastMetric.INSTRUMENTED_SOURCE; +import static net.bytebuddy.jar.asm.Opcodes.BIPUSH; +import static net.bytebuddy.jar.asm.Opcodes.GETSTATIC; +import static net.bytebuddy.jar.asm.Opcodes.ICONST_0; +import static net.bytebuddy.jar.asm.Opcodes.ICONST_1; +import static net.bytebuddy.jar.asm.Opcodes.ICONST_M1; +import static net.bytebuddy.jar.asm.Opcodes.INVOKESTATIC; + +import datadog.trace.api.Config; +import datadog.trace.api.iast.Propagation; +import datadog.trace.api.iast.Sink; +import datadog.trace.api.iast.Source; +import datadog.trace.api.iast.telemetry.IastMetric; +import datadog.trace.api.iast.telemetry.IastMetricCollector; +import datadog.trace.api.iast.telemetry.Verbosity; +import java.util.Collections; +import javax.annotation.Nonnull; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.annotation.AnnotationDescription; +import net.bytebuddy.description.annotation.AnnotationValue; +import net.bytebuddy.description.method.MethodDescription; +import net.bytebuddy.description.type.PackageDescription; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.implementation.Implementation; +import net.bytebuddy.implementation.bytecode.StackManipulation; +import net.bytebuddy.implementation.bytecode.assign.Assigner; +import net.bytebuddy.jar.asm.MethodVisitor; +import net.bytebuddy.jar.asm.Type; + +public class IastPostProcessorFactory implements Advice.PostProcessor.Factory { + + public static final Advice.PostProcessor.Factory INSTANCE; + + static { + final Verbosity verbosity = Config.get().getIastTelemetryVerbosity(); + INSTANCE = verbosity == Verbosity.OFF ? null : new IastPostProcessorFactory(verbosity); + } + + private static final String IAST_ANNOTATIONS_PKG = Sink.class.getPackage().getName(); + private static final String SINK_NAME = Sink.class.getSimpleName(); + private static final String PROPAGATION_NAME = Propagation.class.getSimpleName(); + private static final String SOURCE_NAME = Source.class.getSimpleName(); + + private static final String COLLECTOR_INTERNAL_NAME = + Type.getType(IastMetricCollector.class).getInternalName(); + private static final String METRIC_INTERNAL_NAME = + Type.getType(IastMetric.class).getInternalName(); + private static final String METRIC_DESCRIPTOR = "L" + METRIC_INTERNAL_NAME + ";"; + private static final String ADD_DESCRIPTOR = "(" + METRIC_DESCRIPTOR + "I)V"; + private static final String ADD_WITH_TAG_DESCRIPTOR = "(" + METRIC_DESCRIPTOR + "BI)V"; + + private final Verbosity verbosity; + + public IastPostProcessorFactory(final Verbosity verbosity) { + this.verbosity = verbosity; + } + + @Override + public @Nonnull Advice.PostProcessor make( + @Nonnull final MethodDescription.InDefinedShape advice, final boolean exit) { + for (final AnnotationDescription annotation : advice.getDeclaredAnnotations()) { + final TypeDescription typeDescr = annotation.getAnnotationType(); + final PackageDescription pkgDescr = typeDescr.getPackage(); + if (pkgDescr != null && IAST_ANNOTATIONS_PKG.equals(pkgDescr.getName())) { + final String typeName = typeDescr.getSimpleName(); + if (SINK_NAME.equals(typeName)) { + final AnnotationValue tag = annotation.getValue("value"); + return createPostProcessor(INSTRUMENTED_SINK, EXECUTED_SINK, tag.resolve(Byte.class)); + } else if (SOURCE_NAME.equals(typeName)) { + final AnnotationValue tag = annotation.getValue("value"); + return createPostProcessor(INSTRUMENTED_SOURCE, 
EXECUTED_SOURCE, tag.resolve(Byte.class)); + } else if (PROPAGATION_NAME.equals(typeName)) { + return createPostProcessor(INSTRUMENTED_PROPAGATION, EXECUTED_PROPAGATION, null); + } + } + } + return Advice.PostProcessor.NoOp.INSTANCE; + } + + private PostProcessor createPostProcessor( + final IastMetric instrumented, final IastMetric executed, final Byte tagValue) { + if (!executed.isEnabled(verbosity)) { + return new PostProcessor(instrumented, null, tagValue); + } + return new PostProcessor(instrumented, executed, tagValue); + } + + private static class PostProcessor implements Advice.PostProcessor { + + private final IastMetric instrumentation; + private final IastMetric runtime; + private final Byte tagValue; + + private PostProcessor( + final IastMetric instrumentation, final IastMetric runtime, final Byte tagValue) { + this.instrumentation = instrumentation; + this.runtime = runtime; + this.tagValue = tagValue; + } + + @Override + public @Nonnull StackManipulation resolve( + @Nonnull final TypeDescription instrumentedType, + @Nonnull final MethodDescription instrumentedMethod, + @Nonnull final Assigner assigner, + @Nonnull final Advice.ArgumentHandler argumentHandler, + @Nonnull final Advice.StackMapFrameHandler.ForPostProcessor stackMapFrameHandler, + @Nonnull final StackManipulation exceptionHandler) { + if (tagValue == null) { + IastMetricCollector.add(instrumentation, 1); + } else { + IastMetricCollector.add(instrumentation, (byte) tagValue, 1); + } + return runtime == null + ? StackManipulation.Trivial.INSTANCE + : new TelemetryStackManipulation(stackMapFrameHandler, runtime.name(), tagValue); + } + } + + private static class TelemetryStackManipulation extends StackManipulation.AbstractBase { + private final Advice.StackMapFrameHandler.ForAdvice.ForPostProcessor stackMapFrameHandler; + private final String metricName; + private final Byte tagValue; + + private TelemetryStackManipulation( + final Advice.StackMapFrameHandler.ForPostProcessor stackMapFrameHandler, + final String metricName, + final Byte tagValue) { + this.stackMapFrameHandler = stackMapFrameHandler; + this.metricName = metricName; + this.tagValue = tagValue; + } + + @Override + public @Nonnull Size apply( + @Nonnull final MethodVisitor mv, @Nonnull final Implementation.Context ctx) { + stackMapFrameHandler.injectIntermediateFrame(mv, Collections.emptyList()); + mv.visitFieldInsn(GETSTATIC, METRIC_INTERNAL_NAME, metricName, METRIC_DESCRIPTOR); + final String descriptor; + if (tagValue != null) { + visitTag(mv, tagValue); + descriptor = ADD_WITH_TAG_DESCRIPTOR; + } else { + descriptor = ADD_DESCRIPTOR; + } + mv.visitInsn(ICONST_1); + mv.visitMethodInsn(INVOKESTATIC, COLLECTOR_INTERNAL_NAME, "add", descriptor, false); + return new Size(0, tagValue != null ? 
3 : 2); + } + + private void visitTag(final MethodVisitor mv, final byte tagValue) { + switch (tagValue) { + case -1: + mv.visitInsn(ICONST_M1); + break; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + mv.visitInsn(ICONST_0 + tagValue); + break; + default: + mv.visitIntInsn(BIPUSH, tagValue); + } + } + } +} diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/iast/TaintableEnumeration.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/iast/TaintableEnumeration.java new file mode 100644 index 00000000000..cfc8fb3b44f --- /dev/null +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/iast/TaintableEnumeration.java @@ -0,0 +1,101 @@ +package datadog.trace.agent.tooling.iast; + +import datadog.trace.api.iast.IastContext; +import datadog.trace.api.iast.propagation.PropagationModule; +import datadog.trace.util.stacktrace.StackUtils; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Enumeration; +import javax.annotation.Nullable; + +public class TaintableEnumeration implements Enumeration { + + private static final String CLASS_NAME = TaintableEnumeration.class.getName(); + + private volatile IastContext context; + private volatile boolean contextFetched; + + private final PropagationModule module; + + private final byte origin; + + private final CharSequence name; + + private final boolean useValueAsName; + + private final Enumeration delegate; + + private TaintableEnumeration( + @NonNull final Enumeration delegate, + @NonNull final PropagationModule module, + final byte origin, + @Nullable final CharSequence name, + final boolean useValueAsName) { + this.delegate = delegate; + this.module = module; + this.origin = origin; + this.name = name; + this.useValueAsName = useValueAsName; + } + + @Override + public boolean hasMoreElements() { + try { + return delegate.hasMoreElements(); + } catch (Throwable e) { + StackUtils.filterFirst(e, TaintableEnumeration::nonTaintableEnumerationStack); + throw e; + } + } + + @Override + public String nextElement() { + final String next; + try { + next = delegate.nextElement(); + } catch (Throwable e) { + StackUtils.filterFirst(e, TaintableEnumeration::nonTaintableEnumerationStack); + throw e; + } + try { + module.taint(context(), next, origin, name(next)); + } catch (final Throwable e) { + module.onUnexpectedException("Failed to taint enumeration", e); + } + return next; + } + + private IastContext context() { + if (!contextFetched) { + contextFetched = true; + context = IastContext.Provider.get(); + } + return context; + } + + private CharSequence name(final String value) { + if (name != null) { + return name; + } + return useValueAsName ? 
value : null; + } + + private static boolean nonTaintableEnumerationStack(final StackTraceElement element) { + return !CLASS_NAME.equals(element.getClassName()); + } + + public static Enumeration wrap( + @NonNull final Enumeration delegate, + @NonNull final PropagationModule module, + final byte origin, + @Nullable final CharSequence name) { + return new TaintableEnumeration(delegate, module, origin, name, false); + } + + public static Enumeration wrap( + @NonNull final Enumeration delegate, + @NonNull final PropagationModule module, + final byte origin, + boolean useValueAsName) { + return new TaintableEnumeration(delegate, module, origin, null, useValueAsName); + } +} diff --git a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/muzzle/MuzzleVersionScanPlugin.java b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/muzzle/MuzzleVersionScanPlugin.java index 72562cd902e..23b0b755f74 100644 --- a/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/muzzle/MuzzleVersionScanPlugin.java +++ b/dd-java-agent/agent-tooling/src/main/java/datadog/trace/agent/tooling/muzzle/MuzzleVersionScanPlugin.java @@ -164,6 +164,15 @@ public static void printMuzzleReferences(final ClassLoader instrumentationLoader } } + public static Set listInstrumentationNames( + final ClassLoader instrumentationLoader, String directive) throws Exception { + final Set ret = new HashSet<>(); + for (final Instrumenter.Default instrumenter : toBeTested(instrumentationLoader, directive)) { + ret.add(instrumenter.name()); + } + return ret; + } + private static String prettyPrint(final String prefix, final Reference ref) { final StringBuilder builder = new StringBuilder(prefix); builder.append(Reference.prettyPrint(ref.flags)); diff --git a/dd-java-agent/agent-tooling/src/main/resources/datadog/trace/agent/tooling/bytebuddy/matcher/ignored_class_name.trie b/dd-java-agent/agent-tooling/src/main/resources/datadog/trace/agent/tooling/bytebuddy/matcher/ignored_class_name.trie index ad2b96727af..d244f5a623b 100644 --- a/dd-java-agent/agent-tooling/src/main/resources/datadog/trace/agent/tooling/bytebuddy/matcher/ignored_class_name.trie +++ b/dd-java-agent/agent-tooling/src/main/resources/datadog/trace/agent/tooling/bytebuddy/matcher/ignored_class_name.trie @@ -42,7 +42,8 @@ 1 io.opentelemetry.javaagent.* 1 java.* # allow exception profiling instrumentation -0 java.lang.Throwable +0 java.lang.Exception +0 java.lang.Error # allow ProcessImpl instrumentation 0 java.lang.ProcessImpl 0 java.net.http.* @@ -52,6 +53,7 @@ 0 java.nio.DirectByteBuffer 0 java.nio.ByteBuffer 0 java.rmi.* +0 java.util.Timer 0 java.util.concurrent.* 1 java.util.concurrent.ConcurrentHashMap* 1 java.util.concurrent.atomic.* @@ -121,6 +123,9 @@ 2 akka.http.scaladsl.* 0 akka.http.scaladsl.Http2Ext 0 akka.http.scaladsl.HttpExt +0 akka.http.scaladsl.server.ExceptionHandler$ +0 akka.http.scaladsl.common.StrictForm$ +0 akka.http.impl.engine.server.HttpServerBluePrint$ControllerStage$$anon$* # saves ~0.1s skipping ~407 classes 2 akka.stream.* 0 akka.stream.impl.FanIn$SubInput @@ -132,6 +137,8 @@ 0 akka.http.javadsl.model.HttpHeader 0 akka.http.scaladsl.model.HttpRequest 0 akka.http.scaladsl.model.headers.* +0 akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport +0 akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport$class 0 akka.http.scaladsl.unmarshalling.* 0 akka.http.scaladsl.server.PathMatcher$Matched 0 akka.http.scaladsl.server.directives.ParameterDirectives$class @@ -141,6 +148,7 @@ 0 
akka.http.scaladsl.server.directives.FormFieldDirectives$class 0 akka.http.scaladsl.server.directives.FormFieldDirectives 0 akka.http.scaladsl.model.Uri +0 akka.http.scaladsl.model.Multipart$FormData 0 akka.http.scaladsl.model.FormData 0 akka.http.scaladsl.server.RequestContextImpl 0 akka.http.scaladsl.server.directives.CookieDirectives$class @@ -185,6 +193,9 @@ 0 com.google.common.base.internal.Finalizer 0 com.google.common.util.concurrent.* 2 com.google.gson.* +# Need for IAST: we instrument this class +0 com.google.gson.Gson +0 com.google.gson.stream.JsonReader 2 com.google.inject.* # We instrument Runnable there 0 com.google.inject.internal.AbstractBindingProcessor$* @@ -244,6 +255,7 @@ 0 org.springframework.beans.factory.support.DefaultListableBeanFactory 0 org.springframework.beans.factory.support.DisposableBeanAdapter 2 org.springframework.boot.* +0 org.apache.xalan.transformer.TransformerImpl # More runnables to deal with 0 org.springframework.boot.autoconfigure.BackgroundPreinitializer$* 0 org.springframework.boot.autoconfigure.condition.OnClassCondition$* @@ -276,8 +288,6 @@ 2 org.springframework.expression.* 2 org.springframework.format.* 2 org.springframework.http.* -# Need for IAST: calls ServletRequest methods instrumented at callsite -0 org.springframework.http.server.ServletServerHttpRequest # Need for IAST: we instrument these classes 0 org.springframework.http.HttpHeaders 0 org.springframework.http.ReadOnlyHttpHeaders @@ -313,16 +323,15 @@ 2 org.springframework.validation.* 2 org.springframework.web.* 0 org.springframework.web.context.request.async.* -0 org.springframework.web.context.request.* 0 org.springframework.web.context.support.AbstractRefreshableWebApplicationContext 0 org.springframework.web.context.support.GenericWebApplicationContext 0 org.springframework.web.context.support.XmlWebApplicationContext 0 org.springframework.web.reactive.* 0 org.springframework.web.servlet.* -# Included for IAST -0 org.springframework.web.util.WebUtils -# Included for IAST Spring mvc unvalidated redirect vulnerability -0 org.springframework.web.method.support.InvocableHandlerMethod +# Need for IAST so propagation of tainted objects is complete in spring 2.7.5 +0 org.springframework.util.StreamUtils$NonClosingInputStream +# Included for IAST Spring mvc unvalidated redirect and xss vulnerability +0 org.springframework.web.method.support.HandlerMethodReturnValueHandlerComposite 2 org.xml.* 2 org.yaml.snakeyaml.* # saves ~0.5s skipping instrumentation of almost ~470 classes @@ -330,5 +339,8 @@ # -------- SPECIAL CASES -------- +# incomplete wrapper that throws exceptions for all but one method +1 reactor.rabbitmq.ChannelProxy + 3 com.mchange.v2.c3p0.* 4 org.springframework.http.converter.* diff --git a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/WeakMapTest.groovy b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/WeakMapTest.groovy index 7c1f8eea0d4..e501161c6fe 100644 --- a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/WeakMapTest.groovy +++ b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/WeakMapTest.groovy @@ -1,8 +1,8 @@ package datadog.trace.agent.tooling -import datadog.trace.test.util.Flaky import datadog.trace.test.util.GCUtils import datadog.trace.test.util.DDSpecification +import spock.lang.IgnoreIf import java.lang.ref.WeakReference import java.util.concurrent.TimeUnit @@ -36,6 +36,7 @@ class WeakMapTest extends DDSpecification { } //@Flaky("awaitGC usage is flaky") + 
@IgnoreIf(reason="Often fails in Semeru runtime", value = { System.getProperty("java.runtime.name").contains("Semeru") }) def "Unreferenced map gets cleaned up"() { setup: def map = WeakMaps.newWeakMap() @@ -51,6 +52,7 @@ class WeakMapTest extends DDSpecification { } //@Flaky("awaitGC usage is flaky") + @IgnoreIf(reason="Often fails in Semeru runtime", value = { System.getProperty("java.runtime.name").contains("Semeru") }) def "Unreferenced keys get cleaned up"() { setup: def key = new Object() diff --git a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/MrReturnAdvice.java b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/MrReturnAdvice.java new file mode 100644 index 00000000000..88872a50f41 --- /dev/null +++ b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/MrReturnAdvice.java @@ -0,0 +1,10 @@ +package datadog.trace.agent.tooling.bytebuddy.iast; + +import net.bytebuddy.asm.Advice; + +public class MrReturnAdvice { + @Advice.OnMethodExit + public static void sayHello(@Advice.Return(readOnly = false) String result) { + result = "Hello Mr!"; + } +} diff --git a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitorTest.groovy b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitorTest.groovy index 41fd865afb4..88a51e3ab35 100644 --- a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitorTest.groovy +++ b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/bytebuddy/iast/TaintableVisitorTest.groovy @@ -1,13 +1,32 @@ package datadog.trace.agent.tooling.bytebuddy.iast +import datadog.trace.agent.tooling.bytebuddy.LoadedTaintableClass import datadog.trace.api.iast.Taintable -import groovy.transform.CompileDynamic +import datadog.trace.test.util.DDSpecification import net.bytebuddy.ByteBuddy +import net.bytebuddy.agent.ByteBuddyAgent +import net.bytebuddy.agent.builder.AgentBuilder import net.bytebuddy.description.modifier.Visibility -import spock.lang.Specification +import net.bytebuddy.description.type.TypeDescription +import net.bytebuddy.dynamic.DynamicType +import net.bytebuddy.utility.JavaModule +import net.bytebuddy.utility.nullability.MaybeNull -@CompileDynamic -class TaintableVisitorTest extends Specification { +import java.security.ProtectionDomain + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named + +class TaintableVisitorTest extends DDSpecification { + + private boolean wasEnabled + + void setup() { + wasEnabled = TaintableVisitor.ENABLED + } + + void cleanup() { + TaintableVisitor.ENABLED = wasEnabled + } void 'test taintable visitor'() { given: @@ -81,4 +100,51 @@ class TaintableVisitorTest extends Specification { then: 1 * source.getOrigin() } + + void 'test taintable visitor with already loaded class'() { + given: + final instance = new LoadedTaintableClass() + final listener = Mock(AgentBuilder.RedefinitionStrategy.Listener) + + when: + final result = instance.sayHello() + + then: + result == 'Hello!' 
+ + when: + new AgentBuilder.Default() + .disableClassFormatChanges() + .with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION) + .with(TaintableRedefinitionStrategyListener.INSTANCE) + .with(listener) + .type(named(LoadedTaintableClass.name)) + .transform(new AgentBuilder.Transformer.ForAdvice().advice(named('sayHello'), 'datadog.trace.agent.tooling.bytebuddy.iast.MrReturnAdvice')) + .transform(new AgentBuilder.Transformer() { + @Override + DynamicType.Builder transform(DynamicType.Builder builder, + TypeDescription typeDescription, + @MaybeNull ClassLoader classLoader, + @MaybeNull JavaModule module, + ProtectionDomain protectionDomain) { + return builder.visit(new TaintableVisitor(LoadedTaintableClass.name)) + } + }) + .installOn(ByteBuddyAgent.instrumentation) + + then: + final modifiedResult = instance.sayHello() + + then: + modifiedResult == 'Hello Mr!' + // failing initial batch + 1 * listener.onBatch(0, { List> list -> list.contains(LoadedTaintableClass) }, _) + 1 * listener.onError(0, { List> list -> list.contains(LoadedTaintableClass) }, _, _) >> [] + + // successful batch after disabling the visitor + 1 * listener.onBatch(1, { List> list -> list.contains(LoadedTaintableClass) }, _) + + // finally two batches where executed + 1 * listener.onComplete(2, { List> list -> list.contains(LoadedTaintableClass) }, _) + } } diff --git a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/iast/IastPostProcessorFactoryTest.groovy b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/iast/IastPostProcessorFactoryTest.groovy new file mode 100644 index 00000000000..fb3757edf4a --- /dev/null +++ b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/iast/IastPostProcessorFactoryTest.groovy @@ -0,0 +1,83 @@ +package datadog.trace.agent.tooling.iast + +import datadog.trace.api.iast.telemetry.IastMetric +import datadog.trace.api.iast.telemetry.IastMetricCollector +import datadog.trace.test.util.DDSpecification +import net.bytebuddy.asm.Advice +import net.bytebuddy.description.method.MethodDescription +import net.bytebuddy.description.type.TypeDescription +import net.bytebuddy.implementation.Implementation +import net.bytebuddy.implementation.bytecode.StackManipulation +import net.bytebuddy.implementation.bytecode.assign.Assigner +import net.bytebuddy.jar.asm.MethodVisitor +import net.bytebuddy.jar.asm.Opcodes +import net.bytebuddy.jar.asm.Type + +class IastPostProcessorFactoryTest extends DDSpecification { + + private static final Type COLLECTOR_TYPE = Type.getType(IastMetricCollector) + private static final Type METRIC_TYPE = Type.getType(IastMetric) + + static class NonAnnotatedAdvice { + @Advice.OnMethodExit + static void exit() {} + } + + void 'test factory for non annotated'() { + given: + final method = new MethodDescription.ForLoadedMethod(NonAnnotatedAdvice.getDeclaredMethod('exit')) + + when: + final result = IastPostProcessorFactory.INSTANCE.make(method, true) + + then: + result == Advice.PostProcessor.NoOp.INSTANCE + } + + void 'test factory for annotated advice'() { + given: + final collector = IastMetricCollector.get() + final method = new MethodDescription.ForLoadedMethod(IastAnnotatedAdvice.getDeclaredMethod('exit')) + final typeDescription = Mock(TypeDescription) + final methodDescription = Mock(MethodDescription) + final assigner = Mock(Assigner) + final argumentHandler = Mock(Advice.ArgumentHandler) + final forPostProcessor = Mock(Advice.StackMapFrameHandler.ForPostProcessor) + final stackManipulation = 
Mock(StackManipulation) + final methodVisitor = Mock(MethodVisitor) + final context = Mock(Implementation.Context) + + when: + final postProcessor = IastPostProcessorFactory.INSTANCE.make(method, true) + + then: + postProcessor != Advice.PostProcessor.NoOp.INSTANCE + + when: 'a new advice is handled' + final manipulation = postProcessor.resolve(typeDescription, methodDescription, assigner, argumentHandler, forPostProcessor, stackManipulation) + + then: 'a new method has been instrumented and the metric is generated' + manipulation != null + collector.prepareMetrics() + final metrics = collector.drain() + assert metrics.size() == 1 + // one method has ben instrumented + metrics.first().with { + assert it.metric == IastMetric.INSTRUMENTED_SINK + assert it.tags == ['vulnerability_type:SQL_INJECTION'] + assert it.value == 1L + } + + when: 'the advice is used' + final size = manipulation.apply(methodVisitor, context) + + then: 'the byte code to generate the metric during runtime is appended' + size.sizeImpact == 0 // stack remains the same + size.maximalSize == 3 // metric + tag + counter + 1 * forPostProcessor.injectIntermediateFrame(methodVisitor, []) // new empty frame + 1 * methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, METRIC_TYPE.internalName, 'EXECUTED_SINK', 'L' + METRIC_TYPE.internalName + ';') // add executed metric to stack + 1 * methodVisitor.visitInsn(Opcodes.ICONST_2) // add tag to stack: public static final byte SQL_INJECTION = 2 + 1 * methodVisitor.visitInsn(Opcodes.ICONST_1) // add counter to stack + 1 * methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, COLLECTOR_TYPE.internalName, 'add', '(L' + METRIC_TYPE.internalName + ';BI)V', false) // call increment + } +} diff --git a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/iast/TaintableEnumerationTest.groovy b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/iast/TaintableEnumerationTest.groovy new file mode 100644 index 00000000000..78566a8bec7 --- /dev/null +++ b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/iast/TaintableEnumerationTest.groovy @@ -0,0 +1,99 @@ +package datadog.trace.agent.tooling.iast + +import datadog.trace.api.gateway.RequestContext +import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.api.iast.IastContext +import datadog.trace.api.iast.InstrumentationBridge +import datadog.trace.api.iast.SourceTypes +import datadog.trace.api.iast.propagation.PropagationModule +import datadog.trace.bootstrap.instrumentation.api.AgentSpan +import datadog.trace.bootstrap.instrumentation.api.AgentTracer +import datadog.trace.test.util.DDSpecification +import spock.lang.Shared + +class TaintableEnumerationTest extends DDSpecification { + + @Shared + protected static final AgentTracer.TracerAPI ORIGINAL_TRACER = AgentTracer.get() + + protected AgentTracer.TracerAPI tracer = Mock(AgentTracer.TracerAPI) + + protected IastContext iastCtx = Mock(IastContext) + + protected RequestContext reqCtx = Mock(RequestContext) { + getData(RequestContextSlot.IAST) >> iastCtx + } + + protected AgentSpan span = Mock(AgentSpan) { + getRequestContext() >> reqCtx + } + + protected PropagationModule module + + + void setup() { + AgentTracer.forceRegister(tracer) + module = Mock(PropagationModule) + InstrumentationBridge.registerIastModule(module) + } + + void cleanup() { + AgentTracer.forceRegister(ORIGINAL_TRACER) + InstrumentationBridge.clearIastModules() + } + + void 'underlying enumerated values are tainted with a name'() { + given: + final values = 
(1..10).collect { "value$it".toString() } + final origin = SourceTypes.REQUEST_PARAMETER_NAME + final name = 'test' + final enumeration = TaintableEnumeration.wrap(Collections.enumeration(values), module, origin, name) + + when: + final result = enumeration.collect() + + then: + result == values + values.each { 1 * module.taint(_, it, origin, name) } + 1 * tracer.activeSpan() >> span // only one access to the active context + } + + void 'underlying enumerated values are tainted with the value as a name'() { + given: + final values = (1..10).collect { "value$it".toString() } + final origin = SourceTypes.REQUEST_PARAMETER_NAME + final enumeration = TaintableEnumeration.wrap(Collections.enumeration(values), module, origin, true) + + when: + final result = enumeration.collect() + + then: + result == values + values.each { 1 * module.taint(_, it, origin, it) } + } + + void 'taintable enumeration leaves no trace in case of error'() { + given: + final origin = SourceTypes.REQUEST_PARAMETER_NAME + final enumeration = TaintableEnumeration.wrap(new BadEnumeration(), module, origin, true) + + when: + enumeration.hasMoreElements() + + then: + final first = thrown(Error) + first.stackTrace.find { it.className == TaintableEnumeration.name } == null + } + + private static class BadEnumeration implements Enumeration { + @Override + boolean hasMoreElements() { + throw new Error('Ooops!!!') + } + + @Override + String nextElement() { + throw new Error('Boom!!!') + } + } +} diff --git a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/muzzle/ReferenceMatcherTest.groovy b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/muzzle/ReferenceMatcherTest.groovy index d04ec546e83..cdfcd656e51 100644 --- a/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/muzzle/ReferenceMatcherTest.groovy +++ b/dd-java-agent/agent-tooling/src/test/groovy/datadog/trace/agent/tooling/muzzle/ReferenceMatcherTest.groovy @@ -7,6 +7,7 @@ import datadog.trace.agent.tooling.muzzle.TestAdviceClasses.MethodBodyAdvice import datadog.trace.test.util.DDSpecification import net.bytebuddy.jar.asm.Type import net.bytebuddy.pool.TypePool +import spock.lang.IgnoreIf import spock.lang.Shared import static datadog.trace.agent.tooling.muzzle.Reference.EXPECTS_INTERFACE @@ -60,6 +61,7 @@ class ReferenceMatcherTest extends DDSpecification { getMismatchClassSet(refMatcher.getMismatchedReferenceSources(unsafeClasspath)) == new HashSet<>([MissingClass]) } + @IgnoreIf(reason="Often fails in Semeru runtime", value = { System.getProperty("java.runtime.name").contains("Semeru") }) def "matching does not hold a strong reference to classloaders"() { expect: MuzzleWeakReferenceTest.classLoaderRefIsGarbageCollected() diff --git a/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/bytebuddy/LoadedTaintableClass.java b/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/bytebuddy/LoadedTaintableClass.java new file mode 100644 index 00000000000..a851f988502 --- /dev/null +++ b/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/bytebuddy/LoadedTaintableClass.java @@ -0,0 +1,8 @@ +package datadog.trace.agent.tooling.bytebuddy; + +public class LoadedTaintableClass { + + public String sayHello() { + return "Hello!"; + } +} diff --git a/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/csi/StringConcatExample.java b/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/csi/StringConcatExample.java index 
136517b8c08..facd9ce4ab9 100644 --- a/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/csi/StringConcatExample.java +++ b/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/csi/StringConcatExample.java @@ -11,7 +11,7 @@ public class StringConcatExample implements BiFunction { public String apply(final String first, final String second) { LOGGER.debug("Before apply"); final String result = first.concat(second); - LOGGER.debug("After apply " + result); + LOGGER.debug("After apply {}", result); return result; } } diff --git a/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/iast/IastAnnotatedAdvice.java b/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/iast/IastAnnotatedAdvice.java new file mode 100644 index 00000000000..bed2f532fcb --- /dev/null +++ b/dd-java-agent/agent-tooling/src/test/java/datadog/trace/agent/tooling/iast/IastAnnotatedAdvice.java @@ -0,0 +1,11 @@ +package datadog.trace.agent.tooling.iast; + +import datadog.trace.api.iast.Sink; +import datadog.trace.api.iast.VulnerabilityTypes; +import net.bytebuddy.asm.Advice; + +public class IastAnnotatedAdvice { + @Advice.OnMethodExit + @Sink(VulnerabilityTypes.SQL_INJECTION) + static void exit() {} +} diff --git a/dd-java-agent/agent-tooling/src/test/java11/datadog/trace/agent/tooling/csi/StringPlusConstantsExample.java b/dd-java-agent/agent-tooling/src/test/java11/datadog/trace/agent/tooling/csi/StringPlusConstantsExample.java index 23df53b87a7..a71e58609a0 100644 --- a/dd-java-agent/agent-tooling/src/test/java11/datadog/trace/agent/tooling/csi/StringPlusConstantsExample.java +++ b/dd-java-agent/agent-tooling/src/test/java11/datadog/trace/agent/tooling/csi/StringPlusConstantsExample.java @@ -11,7 +11,7 @@ public class StringPlusConstantsExample implements TriFunction getEventSubscriptions(); - Collection getDataSubscriptions(); - abstract class EventSubscription implements EventListener { - public final EventType eventType; - private final Priority priority; - - protected EventSubscription(EventType eventType, Priority priority) { - this.eventType = eventType; - this.priority = priority; - } - - @Override - public Priority getPriority() { - return priority; - } - } - abstract class DataSubscription implements DataListener { private final Collection> subscribedAddresses; private final Priority priority; diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/AppSecSystem.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/AppSecSystem.java index feb3cd548f9..1fd373a1e74 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/AppSecSystem.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/AppSecSystem.java @@ -1,5 +1,6 @@ package com.datadog.appsec; +import com.datadog.appsec.api.security.ApiSecurityRequestSampler; import com.datadog.appsec.blocking.BlockingServiceImpl; import com.datadog.appsec.config.AppSecConfigService; import com.datadog.appsec.config.AppSecConfigServiceImpl; @@ -7,6 +8,7 @@ import com.datadog.appsec.event.ReplaceableEventProducerService; import com.datadog.appsec.gateway.GatewayBridge; import com.datadog.appsec.gateway.RateLimiter; +import com.datadog.appsec.powerwaf.PowerWAFModule; import com.datadog.appsec.util.AbortStartupException; import com.datadog.appsec.util.StandardizedLogging; import datadog.appsec.api.blocking.Blocking; @@ -23,8 +25,8 @@ import datadog.trace.util.Strings; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; 
-import java.util.ServiceLoader; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -75,11 +77,14 @@ private static void doStart(SubscriptionService gw, SharedCommunicationObjects s sco.createRemaining(config); RateLimiter rateLimiter = getRateLimiter(config, sco.monitoring); + ApiSecurityRequestSampler requestSampler = new ApiSecurityRequestSampler(config); + GatewayBridge gatewayBridge = new GatewayBridge( gw, REPLACEABLE_EVENT_PRODUCER, rateLimiter, + requestSampler, APP_SEC_CONFIG_SERVICE.getTraceSegmentPostProcessors()); loadModules(eventDispatcher); @@ -136,13 +141,10 @@ public static void stop() { } private static void loadModules(EventDispatcher eventDispatcher) { - EventDispatcher.EventSubscriptionSet eventSubscriptionSet = - new EventDispatcher.EventSubscriptionSet(); EventDispatcher.DataSubscriptionSet dataSubscriptionSet = new EventDispatcher.DataSubscriptionSet(); - ServiceLoader modules = - ServiceLoader.load(AppSecModule.class, AppSecSystem.class.getClassLoader()); + final List modules = Collections.singletonList(new PowerWAFModule()); for (AppSecModule module : modules) { log.debug("Starting appsec module {}", module.getName()); try { @@ -155,10 +157,6 @@ private static void loadModules(EventDispatcher eventDispatcher) { continue; } - for (AppSecModule.EventSubscription sub : module.getEventSubscriptions()) { - eventSubscriptionSet.addSubscription(sub.eventType, sub); - } - for (AppSecModule.DataSubscription sub : module.getDataSubscriptions()) { dataSubscriptionSet.addSubscription(sub.getSubscribedAddresses(), sub); } @@ -166,29 +164,21 @@ private static void loadModules(EventDispatcher eventDispatcher) { STARTED_MODULES_INFO.put(module, module.getInfo()); } - eventDispatcher.subscribeEvents(eventSubscriptionSet); eventDispatcher.subscribeDataAvailable(dataSubscriptionSet); } private static void reloadSubscriptions( ReplaceableEventProducerService replaceableEventProducerService) { - EventDispatcher.EventSubscriptionSet eventSubscriptionSet = - new EventDispatcher.EventSubscriptionSet(); EventDispatcher.DataSubscriptionSet dataSubscriptionSet = new EventDispatcher.DataSubscriptionSet(); EventDispatcher newEd = new EventDispatcher(); for (AppSecModule module : STARTED_MODULES_INFO.keySet()) { - for (AppSecModule.EventSubscription sub : module.getEventSubscriptions()) { - eventSubscriptionSet.addSubscription(sub.eventType, sub); - } - for (AppSecModule.DataSubscription sub : module.getDataSubscriptions()) { dataSubscriptionSet.addSubscription(sub.getSubscribedAddresses(), sub); } } - newEd.subscribeEvents(eventSubscriptionSet); newEd.subscribeDataAvailable(dataSubscriptionSet); replaceableEventProducerService.replaceEventProducerService(newEd); diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/api/security/ApiSecurityRequestSampler.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/api/security/ApiSecurityRequestSampler.java new file mode 100644 index 00000000000..cb345e7f94c --- /dev/null +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/api/security/ApiSecurityRequestSampler.java @@ -0,0 +1,37 @@ +package com.datadog.appsec.api.security; + +import datadog.trace.api.Config; +import java.util.concurrent.atomic.AtomicLong; + +public class ApiSecurityRequestSampler { + + private final int sampling; + private final AtomicLong cumulativeCounter = new AtomicLong(); + + public ApiSecurityRequestSampler(final Config config) { + sampling = 
computeSamplingParameter(config.getApiSecurityRequestSampleRate()); + } + + public boolean sampleRequest() { + long prevValue = cumulativeCounter.getAndAdd(sampling); + long newValue = prevValue + sampling; + if (newValue / 100 == prevValue / 100 + 1) { + // Sample request + return true; + } + // Skipped by sampling + return false; + } + + static int computeSamplingParameter(final float pct) { + if (pct >= 1) { + return 100; + } + if (pct < 0) { + // We don't support disabling Api Security by setting it, so we set it to 100%. + // TODO: We probably want a warning here. + return 100; + } + return (int) (pct * 100); + } +} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/blocking/BlockingServiceImpl.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/blocking/BlockingServiceImpl.java index c64c0b95b5b..0a36358271f 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/blocking/BlockingServiceImpl.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/blocking/BlockingServiceImpl.java @@ -84,7 +84,8 @@ public boolean tryCommitBlockingResponse( log.debug("About to call block response function: {}", blockResponseFunction); boolean res = - blockResponseFunction.tryCommitBlockingResponse(statusCode, templateType, extraHeaders); + blockResponseFunction.tryCommitBlockingResponse( + reqCtx.getTraceSegment(), statusCode, templateType, extraHeaders); if (res) { TraceSegment traceSegment = reqCtx.getTraceSegment(); if (traceSegment != null) { diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfig.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfig.java index dbacf30e715..1977fc12cc7 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfig.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfig.java @@ -1,16 +1,19 @@ package com.datadog.appsec.config; import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.JsonReader; +import com.squareup.moshi.JsonWriter; import com.squareup.moshi.Moshi; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; +import javax.annotation.Nullable; public interface AppSecConfig { - Moshi MOSHI = new Moshi.Builder().build(); + Moshi MOSHI = new Moshi.Builder().add(Double.class, new NumberJsonAdapter()).build(); JsonAdapter ADAPTER_V1 = MOSHI.adapter(AppSecConfigV1.class); JsonAdapter ADAPTER_V2 = MOSHI.adapter(AppSecConfigV2.class); @@ -121,4 +124,18 @@ public int hashCode() { return hash; } } + + class NumberJsonAdapter extends JsonAdapter { + + @Nullable + @Override + public Number fromJson(JsonReader reader) throws IOException { + return reader.nextInt(); + } + + @Override + public void toJson(JsonWriter writer, @Nullable Number value) throws IOException { + writer.value(value); + } + } } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfigServiceImpl.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfigServiceImpl.java index b305a1d75d4..ae58309bcd3 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfigServiceImpl.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/AppSecConfigServiceImpl.java @@ -8,6 +8,7 @@ import static datadog.remoteconfig.tuf.RemoteConfigRequest.ClientInfo.CAPABILITY_ASM_EXCLUSIONS; import static datadog.remoteconfig.tuf.RemoteConfigRequest.ClientInfo.CAPABILITY_ASM_IP_BLOCKING; import 
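To make the sampling arithmetic above concrete: computeSamplingParameter turns the configured rate into an integer percentage (rates of 1 or more, and also negative rates, clamp to 100), and sampleRequest keeps a running total, returning true each time that total crosses the next multiple of 100. A rough standalone sketch of the expected behaviour, with the 0.1 rate chosen purely for illustration:

class SamplerArithmeticSketch {
  public static void main(String[] args) {
    int sampling = 10; // computeSamplingParameter(0.1f) for a 10% sample rate
    long counter = 0;
    int sampled = 0;
    for (int request = 1; request <= 100; request++) {
      long prev = counter;
      counter += sampling; // mirrors cumulativeCounter.getAndAdd(sampling)
      if (counter / 100 == prev / 100 + 1) {
        sampled++; // the same condition sampleRequest() uses to accept a request
      }
    }
    System.out.println(sampled); // prints 10: roughly one request in ten is sampled
  }
}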
static datadog.remoteconfig.tuf.RemoteConfigRequest.ClientInfo.CAPABILITY_ASM_REQUEST_BLOCKING; +import static datadog.remoteconfig.tuf.RemoteConfigRequest.ClientInfo.CAPABILITY_ASM_TRUSTED_IPS; import static datadog.remoteconfig.tuf.RemoteConfigRequest.ClientInfo.CAPABILITY_ASM_USER_BLOCKING; import com.datadog.appsec.AppSecSystem; @@ -93,7 +94,8 @@ private void subscribeConfigurationPoller() { | CAPABILITY_ASM_REQUEST_BLOCKING | CAPABILITY_ASM_USER_BLOCKING | CAPABILITY_ASM_CUSTOM_RULES - | CAPABILITY_ASM_CUSTOM_BLOCKING_RESPONSE); + | CAPABILITY_ASM_CUSTOM_BLOCKING_RESPONSE + | CAPABILITY_ASM_TRUSTED_IPS); } private void subscribeRulesAndData() { @@ -184,7 +186,7 @@ private void distributeSubConfigurations( try { listener.onNewSubconfig(newConfig.get(key), reconfiguration); } catch (Exception rte) { - log.warn("Error updating configuration of app sec module listening on key " + key, rte); + log.warn("Error updating configuration of app sec module listening on key {}", key, rte); } } } @@ -328,7 +330,8 @@ public void close() { | CAPABILITY_ASM_REQUEST_BLOCKING | CAPABILITY_ASM_USER_BLOCKING | CAPABILITY_ASM_CUSTOM_RULES - | CAPABILITY_ASM_CUSTOM_BLOCKING_RESPONSE); + | CAPABILITY_ASM_CUSTOM_BLOCKING_RESPONSE + | CAPABILITY_ASM_TRUSTED_IPS); this.configurationPoller.removeListener(Product.ASM_DD); this.configurationPoller.removeListener(Product.ASM_DATA); this.configurationPoller.removeListener(Product.ASM); diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/CurrentAppSecConfig.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/CurrentAppSecConfig.java index 9cb3401b4f2..b661a876503 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/CurrentAppSecConfig.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/CurrentAppSecConfig.java @@ -17,7 +17,7 @@ import org.slf4j.LoggerFactory; public class CurrentAppSecConfig { - private static Logger log = LoggerFactory.getLogger(CurrentAppSecConfig.class); + private static final Logger log = LoggerFactory.getLogger(CurrentAppSecConfig.class); private AppSecConfig ddConfig; // assume there's only one of these CollectedUserConfigs userConfigs = new CollectedUserConfigs(); @@ -84,6 +84,11 @@ public AppSecConfig getMergedUpdateConfig() throws IOException { if (dirtyStatus.rules) { mso.put("metadata", ddConfig.getRawConfig().getOrDefault("metadata", Collections.emptyMap())); mso.put("rules", ddConfig.getRawConfig().getOrDefault("rules", Collections.emptyList())); + mso.put( + "processors", + ddConfig.getRawConfig().getOrDefault("processors", Collections.emptyList())); + mso.put( + "scanners", ddConfig.getRawConfig().getOrDefault("scanners", Collections.emptyList())); } if (dirtyStatus.customRules) { mso.put("custom_rules", getMergedCustomRules()); diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/TraceSegmentPostProcessor.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/TraceSegmentPostProcessor.java index 43aff89458d..a6440c6d6ea 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/TraceSegmentPostProcessor.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/config/TraceSegmentPostProcessor.java @@ -1,11 +1,11 @@ package com.datadog.appsec.config; import com.datadog.appsec.gateway.AppSecRequestContext; -import com.datadog.appsec.report.raw.events.AppSecEvent100; +import com.datadog.appsec.report.AppSecEvent; import datadog.trace.api.internal.TraceSegment; import java.util.Collection; public interface 
TraceSegmentPostProcessor { void processTraceSegment( - TraceSegment segment, AppSecRequestContext ctx, Collection collectedEvents); + TraceSegment segment, AppSecRequestContext ctx, Collection collectedEvents); } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventDispatcher.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventDispatcher.java index a2c320c5cc3..6db67b01f43 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventDispatcher.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventDispatcher.java @@ -1,7 +1,5 @@ package com.datadog.appsec.event; -import static com.datadog.appsec.event.EventType.NUM_EVENT_TYPES; - import com.datadog.appsec.event.data.Address; import com.datadog.appsec.event.data.DataBundle; import com.datadog.appsec.event.data.KnownAddresses; @@ -10,7 +8,6 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -23,8 +20,6 @@ public class EventDispatcher implements EventProducerService { private static final Logger log = LoggerFactory.getLogger(EventDispatcher.class); private static final char[] EMPTY_CHAR_ARRAY = new char[0]; - private List> eventListeners; // index: eventType.serial - // indexes are the ids we successively attribute to listeners // we support up to 2^16 listeners in total // The listeners are ordered by priority (from highest to lowest) @@ -36,12 +31,6 @@ public class EventDispatcher implements EventProducerService { public EventDispatcher() { KnownAddresses.HEADERS_NO_COOKIES.getKey(); // force class initialization - // empty subscriptions - eventListeners = new ArrayList<>(NUM_EVENT_TYPES); - for (int i = 0; i < NUM_EVENT_TYPES; i++) { - eventListeners.add(Collections.emptyList()); - } - final int addressCount = Address.instanceCount(); dataListenerSubs = new ArrayList<>(addressCount); for (int i = 0; i < addressCount; i++) { @@ -49,31 +38,6 @@ public EventDispatcher() { } } - public static class EventSubscriptionSet { - private final List> eventListeners; // index: eventType.serial - - public EventSubscriptionSet() { - KnownAddresses.HEADERS_NO_COOKIES.getKey(); // force class initialization - - eventListeners = new ArrayList<>(NUM_EVENT_TYPES); - for (int i = 0; i < NUM_EVENT_TYPES; i++) { - eventListeners.add(new ArrayList<>(2)); - } - } - - public void addSubscription(EventType event, EventListener listener) { - List eventListeners = this.eventListeners.get(event.ordinal()); - eventListeners.add(listener); - } - } - - public void subscribeEvents(EventSubscriptionSet subscriptionSet) { - for (List eventListener : subscriptionSet.eventListeners) { - eventListener.sort(OrderedCallback.CallbackPriorityComparator.INSTANCE); - } - this.eventListeners = subscriptionSet.eventListeners; - } - public static class DataSubscriptionSet { private final Map indexes = new HashMap<>(); // index: addr.serial @@ -134,18 +98,6 @@ public void subscribeDataAvailable(DataSubscriptionSet subSet) { allSubscribedAddresses = subSet.allAddresses; } - @Override - public void publishEvent(AppSecRequestContext ctx, EventType event) { - List eventListeners = this.eventListeners.get(event.ordinal()); - for (EventListener listener : eventListeners) { - try { - listener.onEvent(ctx, event); - } catch (RuntimeException rte) { - log.warn("AppSec callback exception", rte); - } - } - } - @Override public DataSubscriberInfo getDataSubscribers(Address... 
newAddresses) { if (newAddresses.length == 1) { @@ -204,18 +156,6 @@ public Flow publishDataEvent( return flow; } - @Override - public Collection allSubscribedEvents() { - EventType[] values = EventType.values(); - List res = new ArrayList<>(values.length); - for (int i = 0; i < values.length; i++) { - if (!eventListeners.get(i).isEmpty()) { - res.add(values[i]); - } - } - return res; - } - @Override public Collection> allSubscribedDataAddresses() { return allSubscribedAddresses; diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventListener.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventListener.java deleted file mode 100644 index d35634f3c25..00000000000 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventListener.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.datadog.appsec.event; - -import com.datadog.appsec.gateway.AppSecRequestContext; - -public interface EventListener extends OrderedCallback { - void onEvent(AppSecRequestContext ctx, EventType eventType); -} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventProducerService.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventProducerService.java index cc60ad74476..202c422ccee 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventProducerService.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventProducerService.java @@ -7,13 +7,6 @@ import java.util.Collection; public interface EventProducerService { - /** - * Runs the event callbacks for the given event type. - * - *
<p>
This method does not throw. If one of the callbacks throws, the exception is caught and the - * processing continues. - */ - void publishEvent(AppSecRequestContext ctx, EventType event); /** * Determines the data callbacks for the given addresses. The return value can be cached if it's @@ -49,7 +42,5 @@ interface DataSubscriberInfo { boolean isEmpty(); } - Collection allSubscribedEvents(); - Collection> allSubscribedDataAddresses(); } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventType.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventType.java deleted file mode 100644 index 8ae95380055..00000000000 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/EventType.java +++ /dev/null @@ -1,14 +0,0 @@ -package com.datadog.appsec.event; - -public enum EventType { - REQUEST_START, - REQUEST_END, - - // modules can subscribe to these two to implement heuristics - // dependent on whether the body is on the process of being read. - // After REQUEST_BODY_START, AppSecRequestContext::getStoredRequestBody() is avail - REQUEST_BODY_START, - REQUEST_BODY_END; - - static int NUM_EVENT_TYPES = EventType.values().length; -} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/ReplaceableEventProducerService.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/ReplaceableEventProducerService.java index 99807f2dd45..1139dd52ad9 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/ReplaceableEventProducerService.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/ReplaceableEventProducerService.java @@ -13,11 +13,6 @@ public void replaceEventProducerService(EventProducerService ed) { this.cur = ed; } - @Override - public void publishEvent(AppSecRequestContext ctx, EventType event) { - cur.publishEvent(ctx, event); - } - @Override public DataSubscriberInfo getDataSubscribers(Address... 
newAddresses) { return cur.getDataSubscribers(newAddresses); @@ -33,11 +28,6 @@ public Flow publishDataEvent( return cur.publishDataEvent(subscribers, ctx, newData, isTransient); } - @Override - public Collection allSubscribedEvents() { - return cur.allSubscribedEvents(); - } - @Override public Collection> allSubscribedDataAddresses() { return cur.allSubscribedDataAddresses(); diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/KnownAddresses.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/KnownAddresses.java index 6efee9d1046..85b74f00b94 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/KnownAddresses.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/KnownAddresses.java @@ -45,6 +45,8 @@ public interface KnownAddresses { /** status code of HTTP response */ Address RESPONSE_STATUS = new Address<>("server.response.status"); + Address RESPONSE_BODY_OBJECT = new Address<>("server.response.body"); + /** First chars of HTTP response body */ Address RESPONSE_BODY_RAW = new Address<>("server.response.body.raw"); @@ -98,6 +100,8 @@ public interface KnownAddresses { Address USER_ID = new Address<>("usr.id"); + Address> WAF_CONTEXT_PROCESSOR = new Address<>("waf.context.processor"); + static Address forName(String name) { switch (name) { case "server.request.body": @@ -124,6 +128,8 @@ static Address forName(String name) { return REQUEST_TRANSPORT; case "server.response.status": return RESPONSE_STATUS; + case "server.response.body": + return RESPONSE_BODY_OBJECT; case "server.response.body.raw": return RESPONSE_BODY_RAW; case "server.response.headers.no_cookies": @@ -144,6 +150,8 @@ static Address forName(String name) { return GRPC_SERVER_REQUEST_METADATA; case "usr.id": return USER_ID; + case "waf.context.processor": + return WAF_CONTEXT_PROCESSOR; default: return null; } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/MapDataBundle.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/MapDataBundle.java index 4dc590b9dda..14797179eef 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/MapDataBundle.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/event/data/MapDataBundle.java @@ -78,6 +78,10 @@ public Builder(int capacity) { } public , V> Builder add(A address, V value) { + if (address == null || value == null) return this; + if (value instanceof Collection && ((Collection) value).isEmpty()) return this; + if (value instanceof Map && ((Map) value).isEmpty()) return this; + map.put(address, value); return this; } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/AppSecRequestContext.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/AppSecRequestContext.java index ce66c752b5b..728b5bad015 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/AppSecRequestContext.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/AppSecRequestContext.java @@ -3,9 +3,8 @@ import com.datadog.appsec.event.data.Address; import com.datadog.appsec.event.data.DataBundle; import com.datadog.appsec.event.data.KnownAddresses; -import com.datadog.appsec.report.raw.events.AppSecEvent100; +import com.datadog.appsec.report.AppSecEvent; import com.datadog.appsec.util.StandardizedLogging; -import datadog.trace.api.gateway.BlockResponseFunction; import datadog.trace.api.http.StoredBodySupplier; import datadog.trace.api.internal.TraceSegment; import 
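The MapDataBundle.Builder.add change a little above silently drops null addresses, null values, and empty collections or maps, so callers no longer push empty address entries to the WAF. A hedged usage sketch of the resulting behaviour; imports from com.datadog.appsec.event.data are omitted and the outcome is my reading of the new guards:

class EmptyValueFilteringSketch {
  static DataBundle onlyNonEmptyValues() {
    // With the new guards, the first two add() calls are no-ops, so only
    // server.response.status ends up in the bundle.
    return new MapDataBundle.Builder(MapDataBundle.Builder.CAPACITY_6_10)
        .add(KnownAddresses.REQUEST_QUERY, java.util.Collections.emptyMap()) // empty map: skipped
        .add(KnownAddresses.REQUEST_CLIENT_IP, null)                         // null value: skipped
        .add(KnownAddresses.RESPONSE_STATUS, "200")                          // kept
        .build();
  }
}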
io.sqreen.powerwaf.Additive; @@ -51,7 +50,7 @@ public class AppSecRequestContext implements DataBundle, Closeable { "accept-language")); private final ConcurrentHashMap, Object> persistentData = new ConcurrentHashMap<>(); - private Collection collectedEvents; // guarded by this + private Collection collectedEvents; // guarded by this // assume these will always be written and read by the same thread private String scheme; @@ -74,7 +73,7 @@ public class AppSecRequestContext implements DataBundle, Closeable { private boolean rawReqBodyPublished; private boolean convertedReqBodyPublished; private boolean respDataPublished; - private BlockResponseFunction blockResponseFunction; + private Map apiSchemas; // should be guarded by this private Additive additive; @@ -333,10 +332,6 @@ public void setRespDataPublished(boolean respDataPublished) { this.respDataPublished = respDataPublished; } - public void setBlockResponseFunction(BlockResponseFunction blockResponseFunction) { - this.blockResponseFunction = blockResponseFunction; - } - @Override public void close() { synchronized (this) { @@ -362,8 +357,8 @@ public CharSequence getStoredRequestBody() { return storedRequestBodySupplier.get(); } - public void reportEvents(Collection events, TraceSegment traceSegment) { - for (AppSecEvent100 event : events) { + public void reportEvents(Collection events) { + for (AppSecEvent event : events) { StandardizedLogging.attackDetected(log, event); } synchronized (this) { @@ -378,8 +373,8 @@ public void reportEvents(Collection events, TraceSegment traceSe } } - Collection transferCollectedEvents() { - Collection events; + Collection transferCollectedEvents() { + Collection events; synchronized (this) { events = this.collectedEvents; this.collectedEvents = Collections.emptyList(); @@ -390,4 +385,22 @@ Collection transferCollectedEvents() { return Collections.emptyList(); } } + + public void reportApiSchemas(Map schemas) { + if (schemas == null || schemas.isEmpty()) return; + + if (apiSchemas == null) { + apiSchemas = schemas; + } else { + apiSchemas.putAll(schemas); + } + } + + boolean commitApiSchemas(TraceSegment traceSegment) { + if (traceSegment == null || apiSchemas == null) { + return false; + } + apiSchemas.forEach(traceSegment::setTagTop); + return true; + } } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/GatewayBridge.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/GatewayBridge.java index 86c212c82fd..e89927ebb44 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/GatewayBridge.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/gateway/GatewayBridge.java @@ -3,10 +3,10 @@ import static com.datadog.appsec.event.data.MapDataBundle.Builder.CAPACITY_6_10; import com.datadog.appsec.AppSecSystem; +import com.datadog.appsec.api.security.ApiSecurityRequestSampler; import com.datadog.appsec.config.TraceSegmentPostProcessor; import com.datadog.appsec.event.EventProducerService; import com.datadog.appsec.event.EventProducerService.DataSubscriberInfo; -import com.datadog.appsec.event.EventType; import com.datadog.appsec.event.ExpiredSubscriberInfoException; import com.datadog.appsec.event.data.Address; import com.datadog.appsec.event.data.DataBundle; @@ -14,8 +14,9 @@ import com.datadog.appsec.event.data.MapDataBundle; import com.datadog.appsec.event.data.ObjectIntrospection; import com.datadog.appsec.event.data.SingletonDataBundle; +import com.datadog.appsec.report.AppSecEvent; import com.datadog.appsec.report.AppSecEventWrapper; 
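The two methods added to AppSecRequestContext just above collect whatever schemas the WAF reports during a request and, at request end, copy them onto the trace segment. A rough sketch of that flow; the helper and the tag key are assumptions for illustration, not something this diff defines:

class ApiSchemaFlowSketch {
  // Hypothetical helper; assumes the appsec classes are on the classpath.
  static void recordSchemas(com.datadog.appsec.gateway.AppSecRequestContext ctx) {
    // Each call merges into the per-request map; later calls add or overwrite keys.
    ctx.reportApiSchemas(java.util.Collections.singletonMap("_dd.appsec.s.req.body", "[{\"id\":4}]"));
    // At request end the gateway calls commitApiSchemas(traceSegment), which copies every
    // accumulated entry onto the segment via setTagTop and returns false when the segment
    // is missing or nothing was collected.
  }
}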
-import com.datadog.appsec.report.raw.events.AppSecEvent100; +import datadog.trace.api.Config; import datadog.trace.api.DDTags; import datadog.trace.api.function.TriConsumer; import datadog.trace.api.function.TriFunction; @@ -61,6 +62,7 @@ public class GatewayBridge { private final SubscriptionService subscriptionService; private final EventProducerService producerService; private final RateLimiter rateLimiter; + private final ApiSecurityRequestSampler requestSampler; private final List traceSegmentPostProcessors; // subscriber cache @@ -75,10 +77,12 @@ public GatewayBridge( SubscriptionService subscriptionService, EventProducerService producerService, RateLimiter rateLimiter, + ApiSecurityRequestSampler requestSampler, List traceSegmentPostProcessors) { this.subscriptionService = subscriptionService; this.producerService = producerService; this.rateLimiter = rateLimiter; + this.requestSampler = requestSampler; this.traceSegmentPostProcessors = traceSegmentPostProcessors; } @@ -86,7 +90,7 @@ public void init() { Events events = Events.get(); Collection> additionalIGEvents = IGAppSecEventDependencies.additionalIGEventTypes( - producerService.allSubscribedEvents(), producerService.allSubscribedDataAddresses()); + producerService.allSubscribedDataAddresses()); subscriptionService.registerCallback( events.requestStarted(), @@ -94,12 +98,7 @@ public void init() { if (!AppSecSystem.isActive()) { return RequestContextSupplier.EMPTY; } - - RequestContextSupplier requestContextSupplier = new RequestContextSupplier(); - AppSecRequestContext ctx = requestContextSupplier.getResult(); - producerService.publishEvent(ctx, EventType.REQUEST_START); - - return requestContextSupplier; + return new RequestContextSupplier(); }); subscriptionService.registerCallback( @@ -110,7 +109,8 @@ public void init() { return NoopFlow.INSTANCE; } - producerService.publishEvent(ctx, EventType.REQUEST_END); + // WAF call + ctx.closeAdditive(); TraceSegment traceSeg = ctx_.getTraceSegment(); @@ -119,7 +119,7 @@ public void init() { traceSeg.setTagTop("_dd.appsec.enabled", 1); traceSeg.setTagTop("_dd.runtime_family", "jvm"); - Collection collectedEvents = ctx.transferCollectedEvents(); + Collection collectedEvents = ctx.transferCollectedEvents(); for (TraceSegmentPostProcessor pp : this.traceSegmentPostProcessors) { pp.processTraceSegment(traceSeg, ctx, collectedEvents); @@ -169,6 +169,11 @@ public void init() { }); } } + + // If extracted any Api Schemas - commit them + if (!ctx.commitApiSchemas(traceSeg)) { + log.debug("Unable to commit, api security schemas and will be skipped"); + } } ctx.close(); @@ -182,20 +187,17 @@ public void init() { subscriptionService.registerCallback( EVENTS.requestMethodUriRaw(), new MethodAndRawURICallback()); - if (additionalIGEvents.contains(EVENTS.requestBodyStart())) { - subscriptionService.registerCallback( - EVENTS.requestBodyStart(), - (RequestContext ctx_, StoredBodySupplier supplier) -> { - AppSecRequestContext ctx = ctx_.getData(RequestContextSlot.APPSEC); - if (ctx == null) { - return null; - } - - ctx.setStoredRequestBodySupplier(supplier); - producerService.publishEvent(ctx, EventType.REQUEST_BODY_START); + subscriptionService.registerCallback( + EVENTS.requestBodyStart(), + (RequestContext ctx_, StoredBodySupplier supplier) -> { + AppSecRequestContext ctx = ctx_.getData(RequestContextSlot.APPSEC); + if (ctx == null) { return null; - }); - } + } + + ctx.setStoredRequestBodySupplier(supplier); + return null; + }); if (additionalIGEvents.contains(EVENTS.requestPathParams())) { 
subscriptionService.registerCallback( @@ -223,8 +225,7 @@ public void init() { DataBundle bundle = new SingletonDataBundle<>(KnownAddresses.REQUEST_PATH_PARAMS, data); try { - Flow flow = producerService.publishDataEvent(subInfo, ctx, bundle, false); - return flow; + return producerService.publishDataEvent(subInfo, ctx, bundle, false); } catch (ExpiredSubscriberInfoException e) { pathParamsSubInfo = null; } @@ -232,42 +233,38 @@ public void init() { }); } - if (additionalIGEvents.contains(EVENTS.requestBodyDone())) { - subscriptionService.registerCallback( - EVENTS.requestBodyDone(), - (RequestContext ctx_, StoredBodySupplier supplier) -> { - AppSecRequestContext ctx = ctx_.getData(RequestContextSlot.APPSEC); - if (ctx == null || ctx.isRawReqBodyPublished()) { + subscriptionService.registerCallback( + EVENTS.requestBodyDone(), + (RequestContext ctx_, StoredBodySupplier supplier) -> { + AppSecRequestContext ctx = ctx_.getData(RequestContextSlot.APPSEC); + if (ctx == null || ctx.isRawReqBodyPublished()) { + return NoopFlow.INSTANCE; + } + ctx.setRawReqBodyPublished(true); + + while (true) { + DataSubscriberInfo subInfo = rawRequestBodySubInfo; + if (subInfo == null) { + subInfo = producerService.getDataSubscribers(KnownAddresses.REQUEST_BODY_RAW); + rawRequestBodySubInfo = subInfo; + } + if (subInfo == null || subInfo.isEmpty()) { return NoopFlow.INSTANCE; } - ctx.setRawReqBodyPublished(true); - - producerService.publishEvent(ctx, EventType.REQUEST_BODY_END); - while (true) { - DataSubscriberInfo subInfo = rawRequestBodySubInfo; - if (subInfo == null) { - subInfo = producerService.getDataSubscribers(KnownAddresses.REQUEST_BODY_RAW); - rawRequestBodySubInfo = subInfo; - } - if (subInfo == null || subInfo.isEmpty()) { - return NoopFlow.INSTANCE; - } - - CharSequence bodyContent = supplier.get(); - if (bodyContent == null || bodyContent.length() == 0) { - return NoopFlow.INSTANCE; - } - DataBundle bundle = - new SingletonDataBundle<>(KnownAddresses.REQUEST_BODY_RAW, bodyContent); - try { - return producerService.publishDataEvent(subInfo, ctx, bundle, false); - } catch (ExpiredSubscriberInfoException e) { - rawRequestBodySubInfo = null; - } + CharSequence bodyContent = supplier.get(); + if (bodyContent == null || bodyContent.length() == 0) { + return NoopFlow.INSTANCE; } - }); - } + DataBundle bundle = + new SingletonDataBundle<>(KnownAddresses.REQUEST_BODY_RAW, bodyContent); + try { + return producerService.publishDataEvent(subInfo, ctx, bundle, false); + } catch (ExpiredSubscriberInfoException e) { + rawRequestBodySubInfo = null; + } + } + }); if (additionalIGEvents.contains(EVENTS.requestBodyProcessed())) { subscriptionService.registerCallback( @@ -380,8 +377,7 @@ public void init() { DataBundle bundle = new SingletonDataBundle<>(KnownAddresses.GRPC_SERVER_REQUEST_MESSAGE, convObj); try { - return producerService.publishDataEvent( - grpcServerRequestMsgSubInfo, ctx, bundle, true); + return producerService.publishDataEvent(subInfo, ctx, bundle, true); } catch (ExpiredSubscriberInfoException e) { grpcServerRequestMsgSubInfo = null; } @@ -519,8 +515,9 @@ private Flow maybePublishRequestData(AppSecRequestContext ctx) { .build(); while (true) { - if (initialReqDataSubInfo == null) { - initialReqDataSubInfo = + DataSubscriberInfo subInfo = this.initialReqDataSubInfo; + if (subInfo == null) { + subInfo = producerService.getDataSubscribers( KnownAddresses.HEADERS_NO_COOKIES, KnownAddresses.REQUEST_COOKIES, @@ -531,12 +528,13 @@ private Flow maybePublishRequestData(AppSecRequestContext ctx) { 
KnownAddresses.REQUEST_CLIENT_IP, KnownAddresses.REQUEST_CLIENT_PORT, KnownAddresses.REQUEST_INFERRED_CLIENT_IP); + initialReqDataSubInfo = subInfo; } try { - return producerService.publishDataEvent(initialReqDataSubInfo, ctx, bundle, false); + return producerService.publishDataEvent(subInfo, ctx, bundle, false); } catch (ExpiredSubscriberInfoException e) { - initialReqDataSubInfo = null; + this.initialReqDataSubInfo = null; } } } @@ -551,20 +549,30 @@ private Flow maybePublishResponseData(AppSecRequestContext ctx) { ctx.setRespDataPublished(true); + boolean extractSchema = false; + if (Config.get().isApiSecurityEnabled() && requestSampler != null) { + extractSchema = requestSampler.sampleRequest(); + } + MapDataBundle bundle = MapDataBundle.of( KnownAddresses.RESPONSE_STATUS, String.valueOf(ctx.getResponseStatus()), - KnownAddresses.RESPONSE_HEADERS_NO_COOKIES, ctx.getResponseHeaders()); + KnownAddresses.RESPONSE_HEADERS_NO_COOKIES, ctx.getResponseHeaders(), + // Extract api schema on response stage + KnownAddresses.WAF_CONTEXT_PROCESSOR, + Collections.singletonMap("extract-schema", extractSchema)); while (true) { - if (respDataSubInfo == null) { - respDataSubInfo = + DataSubscriberInfo subInfo = respDataSubInfo; + if (subInfo == null) { + subInfo = producerService.getDataSubscribers( KnownAddresses.RESPONSE_STATUS, KnownAddresses.RESPONSE_HEADERS_NO_COOKIES); + respDataSubInfo = subInfo; } try { - return producerService.publishDataEvent(respDataSubInfo, ctx, bundle, false); + return producerService.publishDataEvent(subInfo, ctx, bundle, false); } catch (ExpiredSubscriberInfoException e) { respDataSubInfo = null; } @@ -637,16 +645,11 @@ private static int byteToDigit(byte b) { } private static class IGAppSecEventDependencies { - private static final Map>> - EVENT_DEPENDENCIES = new HashMap<>(3); // ceil(2 / .75) private static final Map, Collection>> DATA_DEPENDENCIES = new HashMap<>(4); static { - EVENT_DEPENDENCIES.put(EventType.REQUEST_BODY_START, l(EVENTS.requestBodyStart())); - EVENT_DEPENDENCIES.put(EventType.REQUEST_BODY_END, l(EVENTS.requestBodyDone())); - DATA_DEPENDENCIES.put( KnownAddresses.REQUEST_BODY_RAW, l(EVENTS.requestBodyStart(), EVENTS.requestBodyDone())); DATA_DEPENDENCIES.put(KnownAddresses.REQUEST_PATH_PARAMS, l(EVENTS.requestPathParams())); @@ -659,14 +662,8 @@ private static Collection> l( } static Collection> additionalIGEventTypes( - Collection eventTypes, Collection> addresses) { + Collection> addresses) { Set> res = new HashSet<>(); - for (EventType eventType : eventTypes) { - Collection> c = EVENT_DEPENDENCIES.get(eventType); - if (c != null) { - res.addAll(c); - } - } for (Address address : addresses) { Collection> c = DATA_DEPENDENCIES.get(address); if (c != null) { diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFInitializationResultReporter.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFInitializationResultReporter.java index f279f13a985..74d973d7134 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFInitializationResultReporter.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFInitializationResultReporter.java @@ -2,7 +2,7 @@ import com.datadog.appsec.config.TraceSegmentPostProcessor; import com.datadog.appsec.gateway.AppSecRequestContext; -import com.datadog.appsec.report.raw.events.AppSecEvent100; +import com.datadog.appsec.report.AppSecEvent; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; import 
com.squareup.moshi.Types; @@ -35,7 +35,7 @@ public void setReportForPublication(RuleSetInfo report) { @Override public void processTraceSegment( - TraceSegment segment, AppSecRequestContext ctx, Collection collectedEvents) { + TraceSegment segment, AppSecRequestContext ctx, Collection collectedEvents) { RuleSetInfo report = pendingReportRef.get(); if (report == null) { return; diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFModule.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFModule.java index a2777e6bad1..d71e112877c 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFModule.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFModule.java @@ -9,18 +9,15 @@ import com.datadog.appsec.config.AppSecModuleConfigurer; import com.datadog.appsec.config.CurrentAppSecConfig; import com.datadog.appsec.event.ChangeableFlow; -import com.datadog.appsec.event.EventType; import com.datadog.appsec.event.data.Address; import com.datadog.appsec.event.data.DataBundle; import com.datadog.appsec.event.data.KnownAddresses; import com.datadog.appsec.gateway.AppSecRequestContext; -import com.datadog.appsec.report.raw.events.AppSecEvent100; -import com.datadog.appsec.report.raw.events.Parameter; -import com.datadog.appsec.report.raw.events.Rule; -import com.datadog.appsec.report.raw.events.RuleMatch; -import com.datadog.appsec.report.raw.events.Tags; +import com.datadog.appsec.report.AppSecEvent; +import com.datadog.appsec.report.Parameter; +import com.datadog.appsec.report.Rule; +import com.datadog.appsec.report.RuleMatch; import com.datadog.appsec.util.StandardizedLogging; -import com.google.auto.service.AutoService; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; import com.squareup.moshi.Types; @@ -37,6 +34,7 @@ import io.sqreen.powerwaf.RuleSetInfo; import io.sqreen.powerwaf.exception.AbstractPowerwafException; import io.sqreen.powerwaf.exception.InvalidRuleSetException; +import io.sqreen.powerwaf.exception.TimeoutPowerwafException; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationHandler; @@ -64,7 +62,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@AutoService(AppSecModule.class) public class PowerWAFModule implements AppSecModule { private static final Logger log = LoggerFactory.getLogger(PowerWAFModule.class); @@ -75,7 +72,6 @@ public class PowerWAFModule implements AppSecModule { private static final Class PROXY_CLASS = Proxy.getProxyClass(PowerWAFModule.class.getClassLoader(), Set.class); private static final Constructor PROXY_CLASS_CONSTRUCTOR; - private static final Set EVENTS_OF_INTEREST; private static final JsonAdapter> RES_JSON_ADAPTER; @@ -117,10 +113,6 @@ CtxAndAddresses withNewActions(Map actionInfoMap) { throw new UndeclaredThrowableException(e); } - EVENTS_OF_INTEREST = new HashSet<>(); - EVENTS_OF_INTEREST.add(EventType.REQUEST_START); - EVENTS_OF_INTEREST.add(EventType.REQUEST_END); - Moshi moshi = new Moshi.Builder().build(); RES_JSON_ADAPTER = moshi.adapter(Types.newParameterizedType(List.class, PowerWAFResultData.class)); @@ -245,7 +237,7 @@ private void initializeNewWafCtx( initReport = newPwafCtx.getRuleSetInfo(); Collection> addresses = getUsedAddresses(newPwafCtx); - // Update current rules' version if need + // Update current rules' version if you need if (initReport != null && initReport.rulesetVersion != null) { currentRulesVersion = initReport.rulesetVersion; } @@ 
-256,12 +248,18 @@ private void initializeNewWafCtx( WafMetricCollector.get().wafUpdates(currentRulesVersion); } - log.info( - "Created {} WAF context with rules ({} OK, {} BAD), version {}", - prevContextAndAddresses == null ? "new" : "updated", - initReport.getNumRulesOK(), - initReport.getNumRulesError(), - initReport.rulesetVersion); + if (initReport != null) { + log.info( + "Created {} WAF context with rules ({} OK, {} BAD), version {}", + prevContextAndAddresses == null ? "new" : "updated", + initReport.getNumRulesOK(), + initReport.getNumRulesError(), + initReport.rulesetVersion); + } else { + log.warn( + "Created {} WAF context without rules", + prevContextAndAddresses == null ? "new" : "updated"); + } Map actionInfoMap = calculateEffectiveActions(prevContextAndAddresses, ruleConfig); @@ -356,24 +354,6 @@ public String getInfo() { return "powerwaf(libddwaf: " + Powerwaf.LIB_VERSION + ") loaded"; } - @Override - public Collection getEventSubscriptions() { - return singletonList(new PowerWAFEventsCallback()); - } - - private static class PowerWAFEventsCallback extends EventSubscription { - public PowerWAFEventsCallback() { - super(EventType.REQUEST_END, Priority.DEFAULT); - } - - @Override - public void onEvent(AppSecRequestContext reqCtx, EventType eventType) { - if (eventType == EventType.REQUEST_END) { - reqCtx.closeAdditive(); - } - } - } - @Override public Collection getDataSubscriptions() { if (this.ctxAndAddresses.get() == null) { @@ -384,7 +364,7 @@ public Collection getDataSubscriptions() { private static Collection> getUsedAddresses(PowerwafContext ctx) { String[] usedAddresses = ctx.getUsedAddresses(); - List> addressList = new ArrayList<>(usedAddresses.length); + Set> addressList = new HashSet<>(usedAddresses.length); for (String addrKey : usedAddresses) { Address address = KnownAddresses.forName(addrKey); if (address != null) { @@ -393,6 +373,17 @@ private static Collection> getUsedAddresses(PowerwafContext ctx) { log.warn("WAF has rule against unknown address {}", addrKey); } } + + // TODO: get addresses dynamically when will it be implemented in waf + addressList.add(KnownAddresses.WAF_CONTEXT_PROCESSOR); + addressList.add(KnownAddresses.HEADERS_NO_COOKIES); + addressList.add(KnownAddresses.REQUEST_QUERY); + addressList.add(KnownAddresses.REQUEST_PATH_PARAMS); + addressList.add(KnownAddresses.REQUEST_COOKIES); + addressList.add(KnownAddresses.REQUEST_BODY_RAW); + addressList.add(KnownAddresses.RESPONSE_HEADERS_NO_COOKIES); + addressList.add(KnownAddresses.RESPONSE_BODY_OBJECT); + return addressList; } @@ -410,23 +401,26 @@ public void onDataAvailable( log.debug("Skipped; the WAF is not configured"); return; } - try { - StandardizedLogging.executingWAF(log); - long start = 0L; - if (log.isDebugEnabled()) { - start = System.currentTimeMillis(); - } - resultWithData = doRunPowerwaf(reqCtx, newData, ctxAndAddr, isTransient); + StandardizedLogging.executingWAF(log); + long start = 0L; + if (log.isDebugEnabled()) { + start = System.currentTimeMillis(); + } + try { + resultWithData = doRunPowerwaf(reqCtx, newData, ctxAndAddr, isTransient); + } catch (TimeoutPowerwafException tpe) { + log.debug("Timeout calling the WAF", tpe); + return; + } catch (AbstractPowerwafException e) { + log.error("Error calling WAF", e); + return; + } finally { if (log.isDebugEnabled()) { long elapsed = System.currentTimeMillis() - start; StandardizedLogging.finishedExecutionWAF(log, elapsed); } - - } catch (AbstractPowerwafException e) { - log.error("Error calling WAF", e); - return; } 
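// The reworked catch/finally above downgrades WAF timeouts to a debug log with an early
// return, keeps other AbstractPowerwafException failures at error level, and now records
// the elapsed time in the finally block even on those early returns; the statements below
// therefore only run when the WAF produced a result.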
StandardizedLogging.inAppWafReturn(log, resultWithData); @@ -436,29 +430,27 @@ public void onDataAvailable( log.warn("WAF signalled result {}: {}", resultWithData.result, resultWithData.data); } - if (resultWithData.actions.length > 0) { - for (String action : resultWithData.actions) { - ActionInfo actionInfo = ctxAndAddr.actionInfoMap.get(action); - if (actionInfo == null) { - log.warn( - "WAF indicated action {}, but such action id is unknown (not one from {})", - action, - ctxAndAddr.actionInfoMap.keySet()); - } else if ("block_request".equals(actionInfo.type)) { - Flow.Action.RequestBlockingAction rba = createBlockRequestAction(actionInfo); - flow.setAction(rba); - break; - } else if ("redirect_request".equals(actionInfo.type)) { - Flow.Action.RequestBlockingAction rba = createRedirectRequestAction(actionInfo); - flow.setAction(rba); - break; - } else { - log.info("Ignoring action with type {}", actionInfo.type); - } + for (String action : resultWithData.actions) { + ActionInfo actionInfo = ctxAndAddr.actionInfoMap.get(action); + if (actionInfo == null) { + log.warn( + "WAF indicated action {}, but such action id is unknown (not one from {})", + action, + ctxAndAddr.actionInfoMap.keySet()); + } else if ("block_request".equals(actionInfo.type)) { + Flow.Action.RequestBlockingAction rba = createBlockRequestAction(actionInfo); + flow.setAction(rba); + break; + } else if ("redirect_request".equals(actionInfo.type)) { + Flow.Action.RequestBlockingAction rba = createRedirectRequestAction(actionInfo); + flow.setAction(rba); + break; + } else { + log.info("Ignoring action with type {}", actionInfo.type); } } - Collection events = buildEvents(resultWithData); - reqCtx.reportEvents(events, null); + Collection events = buildEvents(resultWithData); + reqCtx.reportEvents(events); if (flow.isBlocking()) { WafMetricCollector.get().wafRequestBlocked(); @@ -469,6 +461,10 @@ public void onDataAvailable( } else { WafMetricCollector.get().wafRequest(); } + + if (resultWithData != null && resultWithData.schemas != null) { + reqCtx.reportApiSchemas(resultWithData.schemas); + } } private Flow.Action.RequestBlockingAction createBlockRequestAction(ActionInfo actionInfo) { @@ -540,7 +536,7 @@ private Powerwaf.ResultWithData runPowerwafTransient( new DataBundleMapWrapper(ctxAndAddr.addressesOfInterest, bundle), LIMITS, metrics); } - private Collection buildEvents(Powerwaf.ResultWithData actionWithData) { + private Collection buildEvents(Powerwaf.ResultWithData actionWithData) { Collection listResults; try { listResults = RES_JSON_ADAPTER.fromJson(actionWithData.data); @@ -557,7 +553,7 @@ private Collection buildEvents(Powerwaf.ResultWithData actionWit return emptyList(); } - private AppSecEvent100 buildEvent(PowerWAFResultData wafResult) { + private AppSecEvent buildEvent(PowerWAFResultData wafResult) { if (wafResult == null || wafResult.rule == null || wafResult.rule_matches == null) { log.warn("WAF result is empty: {}", wafResult); @@ -571,7 +567,7 @@ private AppSecEvent100 buildEvent(PowerWAFResultData wafResult) { for (PowerWAFResultData.Parameter parameter : rule_match.parameters) { parameterList.add( - new Parameter.ParameterBuilder() + new Parameter.Builder() .withAddress(parameter.address) .withKeyPath(parameter.key_path) .withValue(parameter.value) @@ -580,7 +576,7 @@ private AppSecEvent100 buildEvent(PowerWAFResultData wafResult) { } RuleMatch ruleMatch = - new RuleMatch.RuleMatchBuilder() + new RuleMatch.Builder() .withOperator(rule_match.operator) .withOperatorValue(rule_match.operator_value) 
.withParameters(parameterList) @@ -589,16 +585,12 @@ private AppSecEvent100 buildEvent(PowerWAFResultData wafResult) { ruleMatchList.add(ruleMatch); } - return new AppSecEvent100.AppSecEvent100Builder() + return new AppSecEvent.Builder() .withRule( - new Rule.RuleBuilder() + new Rule.Builder() .withId(wafResult.rule.id) .withName(wafResult.rule.name) - .withTags( - new Tags.TagsBuilder() - .withType(wafResult.rule.tags.get("type")) - .withCategory(wafResult.rule.tags.get("category")) - .build()) + .withTags(wafResult.rule.tags) .build()) .withRuleMatches(ruleMatchList) .build(); diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFStatsReporter.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFStatsReporter.java index d1dabcb7c3e..49e5bf41b97 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFStatsReporter.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/powerwaf/PowerWAFStatsReporter.java @@ -2,7 +2,7 @@ import com.datadog.appsec.config.TraceSegmentPostProcessor; import com.datadog.appsec.gateway.AppSecRequestContext; -import com.datadog.appsec.report.raw.events.AppSecEvent100; +import com.datadog.appsec.report.AppSecEvent; import datadog.trace.api.internal.TraceSegment; import io.sqreen.powerwaf.PowerwafMetrics; import java.util.Collection; @@ -18,7 +18,7 @@ public class PowerWAFStatsReporter implements TraceSegmentPostProcessor { @Override public void processTraceSegment( - TraceSegment segment, AppSecRequestContext ctx, Collection collectedEvents) { + TraceSegment segment, AppSecRequestContext ctx, Collection collectedEvents) { PowerwafMetrics metrics = ctx.getWafMetrics(); if (metrics != null) { segment.setTagTop(TOTAL_DURATION_US_TAG, metrics.getTotalRunTimeNs() / 1000L); diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEvent.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEvent.java new file mode 100644 index 00000000000..7381f621994 --- /dev/null +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEvent.java @@ -0,0 +1,96 @@ +package com.datadog.appsec.report; + +import java.util.ArrayList; +import java.util.List; + +public class AppSecEvent { + + @com.squareup.moshi.Json(name = "rule") + private Rule rule; + + @com.squareup.moshi.Json(name = "rule_matches") + private List ruleMatches = new ArrayList<>(); + + public Rule getRule() { + return rule; + } + + public void setRule(Rule rule) { + this.rule = rule; + } + + public List getRuleMatches() { + return ruleMatches; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(AppSecEvent.class.getName()) + .append('@') + .append(Integer.toHexString(System.identityHashCode(this))) + .append('['); + sb.append("rule"); + sb.append('='); + sb.append(((this.rule == null) ? "" : this.rule)); + sb.append(','); + sb.append("ruleMatches"); + sb.append('='); + sb.append(((this.ruleMatches == null) ? "" : this.ruleMatches)); + sb.append(','); + if (sb.charAt((sb.length() - 1)) == ',') { + sb.setCharAt((sb.length() - 1), ']'); + } else { + sb.append(']'); + } + return sb.toString(); + } + + @Override + public int hashCode() { + int result = 1; + result = ((result * 31) + ((this.rule == null) ? 0 : this.rule.hashCode())); + result = ((result * 31) + ((this.ruleMatches == null) ? 
0 : this.ruleMatches.hashCode())); + return result; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } + if (!(other instanceof AppSecEvent)) { + return false; + } + AppSecEvent rhs = ((AppSecEvent) other); + return (((this.rule == rhs.rule) || ((this.rule != null) && this.rule.equals(rhs.rule))) + && ((this.ruleMatches == rhs.ruleMatches) + || ((this.ruleMatches != null) && this.ruleMatches.equals(rhs.ruleMatches)))); + } + + public static class Builder { + + protected AppSecEvent instance; + + public Builder() { + this.instance = new AppSecEvent(); + } + + public AppSecEvent build() { + AppSecEvent result; + result = this.instance; + this.instance = null; + return result; + } + + public Builder withRule(Rule rule) { + this.instance.rule = rule; + return this; + } + + public Builder withRuleMatches(List ruleMatches) { + this.instance.ruleMatches = ruleMatches; + return this; + } + } +} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEventWrapper.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEventWrapper.java index 0157086756d..dcbff552463 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEventWrapper.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/AppSecEventWrapper.java @@ -1,6 +1,5 @@ package com.datadog.appsec.report; -import com.datadog.appsec.report.raw.events.AppSecEvent100; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; import java.util.Collection; @@ -11,14 +10,14 @@ public class AppSecEventWrapper { private static final JsonAdapter ADAPTER = new Moshi.Builder().build().adapter(AppSecEventWrapper.class); - private Collection triggers; + private final Collection triggers; private String json; - public AppSecEventWrapper(Collection events) { + public AppSecEventWrapper(Collection events) { this.triggers = events; } - public Collection getTriggers() { + public Collection getTriggers() { return triggers; } diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/Parameter.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/Parameter.java new file mode 100644 index 00000000000..44fbcb8326e --- /dev/null +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/Parameter.java @@ -0,0 +1,158 @@ +package com.datadog.appsec.report; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class Parameter { + + /** + * The address containing the value that triggered the rule. For example ``http.server.query``. + * (Required) + */ + @com.squareup.moshi.Json(name = "address") + private String address; + + /** + * The path of the value that triggered the rule. For example ``["query", 0]`` to refer to the + * value in ``{"query": ["triggering value"]}``. (Required) + */ + @com.squareup.moshi.Json(name = "key_path") + private List keyPath = new ArrayList<>(); + + /** The value that triggered the rule. (Required) */ + @com.squareup.moshi.Json(name = "value") + private String value; + + /** The part of the value that triggered the rule. (Required) */ + @com.squareup.moshi.Json(name = "highlight") + private List highlight = new ArrayList<>(); + + /** + * The address containing the value that triggered the rule. For example ``http.server.query``. + * (Required) + */ + public String getAddress() { + return address; + } + + /** + * The address containing the value that triggered the rule. For example ``http.server.query``. 
+ * (Required) + */ + public void setAddress(String address) { + this.address = address; + } + + /** + * The path of the value that triggered the rule. For example ``["query", 0]`` to refer to the + * value in ``{"query": ["triggering value"]}``. (Required) + */ + public List getKeyPath() { + return keyPath; + } + + /** The value that triggered the rule. (Required) */ + public String getValue() { + return value; + } + + /** The part of the value that triggered the rule. (Required) */ + public List getHighlight() { + return highlight; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(Parameter.class.getName()) + .append('@') + .append(Integer.toHexString(System.identityHashCode(this))) + .append('['); + sb.append("address"); + sb.append('='); + sb.append(((this.address == null) ? "" : this.address)); + sb.append(','); + sb.append("keyPath"); + sb.append('='); + sb.append(((this.keyPath == null) ? "" : this.keyPath)); + sb.append(','); + sb.append("value"); + sb.append('='); + sb.append(((this.value == null) ? "" : this.value)); + sb.append(','); + sb.append("highlight"); + sb.append('='); + sb.append(((this.highlight == null) ? "" : this.highlight)); + sb.append(','); + if (sb.charAt((sb.length() - 1)) == ',') { + sb.setCharAt((sb.length() - 1), ']'); + } else { + sb.append(']'); + } + return sb.toString(); + } + + @Override + public int hashCode() { + int result = 1; + result = ((result * 31) + ((this.highlight == null) ? 0 : this.highlight.hashCode())); + result = ((result * 31) + ((this.address == null) ? 0 : this.address.hashCode())); + result = ((result * 31) + ((this.value == null) ? 0 : this.value.hashCode())); + result = ((result * 31) + ((this.keyPath == null) ? 0 : this.keyPath.hashCode())); + return result; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } + if (!(other instanceof Parameter)) { + return false; + } + + Parameter rhs = ((Parameter) other); + return (Objects.equals(this.highlight, rhs.highlight) + || this.highlight != null && this.highlight.equals(rhs.highlight)) + && Objects.equals(this.address, rhs.address) + && Objects.equals(this.value, rhs.value) + && Objects.equals(this.keyPath, rhs.keyPath); + } + + public static class Builder { + + protected Parameter instance; + + public Builder() { + this.instance = new Parameter(); + } + + public Parameter build() { + Parameter result; + result = this.instance; + this.instance = null; + return result; + } + + public Builder withAddress(String address) { + this.instance.address = address; + return this; + } + + public Builder withKeyPath(List keyPath) { + this.instance.keyPath = keyPath; + return this; + } + + public Builder withValue(String value) { + this.instance.value = value; + return this; + } + + public Builder withHighlight(List highlight) { + this.instance.highlight = highlight; + return this; + } + } +} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/Rule.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/Rule.java new file mode 100644 index 00000000000..40ba2ef8f03 --- /dev/null +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/Rule.java @@ -0,0 +1,135 @@ +package com.datadog.appsec.report; + +import java.util.Map; +import java.util.Objects; + +public class Rule { + /** + * The unique identifier of the rule that triggered the event. For example, ``ua-910-xax``. 
+ * (Required) + */ + @com.squareup.moshi.Json(name = "id") + private java.lang.String id; + /** The friendly name of the rule that triggered the event. (Required) */ + @com.squareup.moshi.Json(name = "name") + private java.lang.String name; + /** (Required) */ + @com.squareup.moshi.Json(name = "tags") + private Map tags; + + /** + * The unique identifier of the rule that triggered the event. For example, ``ua-910-xax``. + * (Required) + */ + public java.lang.String getId() { + return id; + } + + /** + * The unique identifier of the rule that triggered the event. For example, ``ua-910-xax``. + * (Required) + */ + public void setId(java.lang.String id) { + this.id = id; + } + + /** The friendly name of the rule that triggered the event. (Required) */ + public java.lang.String getName() { + return name; + } + + /** The friendly name of the rule that triggered the event. (Required) */ + public void setName(java.lang.String name) { + this.name = name; + } + + /** (Required) */ + public Map getTags() { + return tags; + } + + /** (Required) */ + public void setTags(Map tags) { + this.tags = tags; + } + + @Override + public java.lang.String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(Rule.class.getName()) + .append('@') + .append(Integer.toHexString(System.identityHashCode(this))) + .append('['); + sb.append("id"); + sb.append('='); + sb.append(((this.id == null) ? "" : this.id)); + sb.append(','); + sb.append("name"); + sb.append('='); + sb.append(((this.name == null) ? "" : this.name)); + sb.append(','); + sb.append("tags"); + sb.append('='); + sb.append(((this.tags == null) ? "" : this.tags)); + sb.append(','); + if (sb.charAt((sb.length() - 1)) == ',') { + sb.setCharAt((sb.length() - 1), ']'); + } else { + sb.append(']'); + } + return sb.toString(); + } + + @Override + public int hashCode() { + int result = 1; + result = ((result * 31) + ((this.name == null) ? 0 : this.name.hashCode())); + result = ((result * 31) + ((this.id == null) ? 0 : this.id.hashCode())); + result = ((result * 31) + ((this.tags == null) ? 0 : this.tags.hashCode())); + return result; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } + if (!(other instanceof Rule)) { + return false; + } + Rule rhs = ((Rule) other); + return (((Objects.equals(this.name, rhs.name)) && (Objects.equals(this.id, rhs.id))) + && (Objects.equals(this.tags, rhs.tags))); + } + + public static class Builder { + + protected Rule instance; + + public Builder() { + this.instance = new Rule(); + } + + public Rule build() { + Rule result; + result = this.instance; + this.instance = null; + return result; + } + + public Builder withId(java.lang.String id) { + this.instance.id = id; + return this; + } + + public Builder withName(java.lang.String name) { + this.instance.name = name; + return this; + } + + public Builder withTags(Map tags) { + this.instance.tags = tags; + return this; + } + } +} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/RuleMatch.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/RuleMatch.java new file mode 100644 index 00000000000..582b5fac5a6 --- /dev/null +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/report/RuleMatch.java @@ -0,0 +1,133 @@ +package com.datadog.appsec.report; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class RuleMatch { + /** + * The rule operator that triggered this event. For example, ``match_regex`` or ``phrase_match``. 
+ * (Required) + */ + @com.squareup.moshi.Json(name = "operator") + private String operator; + /** + * The rule operator operand that triggered this event. For example, the word that triggered using + * the ``phrase_match`` operator. (Required) + */ + @com.squareup.moshi.Json(name = "operator_value") + private String operatorValue; + /** (Required) */ + @com.squareup.moshi.Json(name = "parameters") + private List parameters = new ArrayList<>(); + + /** + * The rule operator that triggered this event. For example, ``match_regex`` or ``phrase_match``. + * (Required) + */ + public String getOperator() { + return operator; + } + + /** + * The rule operator that triggered this event. For example, ``match_regex`` or ``phrase_match``. + * (Required) + */ + public void setOperator(String operator) { + this.operator = operator; + } + + /** + * The rule operator operand that triggered this event. For example, the word that triggered using + * the ``phrase_match`` operator. (Required) + */ + public String getOperatorValue() { + return operatorValue; + } + + /** (Required) */ + public List getParameters() { + return parameters; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(RuleMatch.class.getName()) + .append('@') + .append(Integer.toHexString(System.identityHashCode(this))) + .append('['); + sb.append("operator"); + sb.append('='); + sb.append(((this.operator == null) ? "" : this.operator)); + sb.append(','); + sb.append("operatorValue"); + sb.append('='); + sb.append(((this.operatorValue == null) ? "" : this.operatorValue)); + sb.append(','); + sb.append("parameters"); + sb.append('='); + sb.append(((this.parameters == null) ? "" : this.parameters)); + sb.append(','); + if (sb.charAt((sb.length() - 1)) == ',') { + sb.setCharAt((sb.length() - 1), ']'); + } else { + sb.append(']'); + } + return sb.toString(); + } + + @Override + public int hashCode() { + int result = 1; + result = ((result * 31) + ((this.parameters == null) ? 0 : this.parameters.hashCode())); + result = ((result * 31) + ((this.operator == null) ? 0 : this.operator.hashCode())); + result = ((result * 31) + ((this.operatorValue == null) ? 
0 : this.operatorValue.hashCode())); + return result; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } + if (!(other instanceof RuleMatch)) { + return false; + } + RuleMatch rhs = ((RuleMatch) other); + return (Objects.equals(this.parameters, rhs.parameters)) + && Objects.equals(this.operator, rhs.operator) + && Objects.equals(this.operatorValue, rhs.operatorValue); + } + + public static class Builder { + + protected RuleMatch instance; + + public Builder() { + this.instance = new RuleMatch(); + } + + public RuleMatch build() { + RuleMatch result; + result = this.instance; + this.instance = null; + return result; + } + + public Builder withOperator(String operator) { + this.instance.operator = operator; + return this; + } + + public Builder withOperatorValue(String operatorValue) { + this.instance.operatorValue = operatorValue; + return this; + } + + public Builder withParameters(List parameters) { + this.instance.parameters = parameters; + return this; + } + } +} diff --git a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/util/StandardizedLogging.java b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/util/StandardizedLogging.java index e2ac7ade868..25703fa343b 100644 --- a/dd-java-agent/appsec/src/main/java/com/datadog/appsec/util/StandardizedLogging.java +++ b/dd-java-agent/appsec/src/main/java/com/datadog/appsec/util/StandardizedLogging.java @@ -1,8 +1,8 @@ package com.datadog.appsec.util; import com.datadog.appsec.event.data.Address; -import com.datadog.appsec.report.raw.events.AppSecEvent100; -import com.datadog.appsec.report.raw.events.Rule; +import com.datadog.appsec.report.AppSecEvent; +import com.datadog.appsec.report.Rule; import io.sqreen.powerwaf.Powerwaf; import org.slf4j.Logger; import org.slf4j.Marker; @@ -131,7 +131,7 @@ public static void inAppWafReturn(Logger logger, Powerwaf.ResultWithData resultW } // D6, I5 - public static void attackDetected(Logger logger, AppSecEvent100 event) { + public static void attackDetected(Logger logger, AppSecEvent event) { String ruleId = "unknown rule"; Rule rule = event.getRule(); if (rule != null) { diff --git a/dd-java-agent/appsec/src/main/resources/default_config.json b/dd-java-agent/appsec/src/main/resources/default_config.json index 4bbe6f9a5b5..a6e01468548 100644 --- a/dd-java-agent/appsec/src/main/resources/default_config.json +++ b/dd-java-agent/appsec/src/main/resources/default_config.json @@ -1,7 +1,7 @@ { "version": "2.2", "metadata": { - "rules_version": "1.7.1" + "rules_version": "1.8.0" }, "rules": [ { @@ -62,6 +62,8 @@ "crs_id": "913110", "category": "attack_attempt", "tool_name": "Acunetix", + "cwe": "200", + "capec": "1000/118/169", "confidence": "0" }, "conditions": [ @@ -94,6 +96,8 @@ "type": "security_scanner", "crs_id": "913120", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -108,6 +112,12 @@ }, { "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -144,6 +154,8 @@ "type": "http_protocol_violation", "crs_id": "920260", "category": "attack_attempt", + "cwe": "176", + "capec": "1000/255/153/267/71", "confidence": "0" }, "conditions": [ @@ -171,7 +183,9 @@ "tags": { "type": "http_protocol_violation", "crs_id": "921110", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "444", + "capec": "1000/210/272/220/33" }, "conditions": [ { @@ -206,7 +220,9 @@ "tags": { 
"type": "http_protocol_violation", "crs_id": "921160", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "113", + "capec": "1000/210/272/220/105" }, "conditions": [ { @@ -239,6 +255,8 @@ "type": "lfi", "crs_id": "930100", "category": "attack_attempt", + "cwe": "22", + "capec": "1000/255/153/126", "confidence": "1" }, "conditions": [ @@ -271,6 +289,8 @@ "type": "lfi", "crs_id": "930110", "category": "attack_attempt", + "cwe": "22", + "capec": "1000/255/153/126", "confidence": "1" }, "conditions": [ @@ -304,6 +324,8 @@ "type": "lfi", "crs_id": "930120", "category": "attack_attempt", + "cwe": "22", + "capec": "1000/255/153/126", "confidence": "1" }, "conditions": [ @@ -321,6 +343,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -1743,7 +1768,10 @@ "sys/hypervisor", "sys/kernel", "sys/module", - "sys/power" + "sys/power", + "windows\\win.ini", + "default\\ntuser.dat", + "/var/run/secrets/kubernetes.io/serviceaccount" ] }, "operator": "phrase_match" @@ -1761,6 +1789,8 @@ "type": "rfi", "crs_id": "931110", "category": "attack_attempt", + "cwe": "98", + "capec": "1000/152/175/253/193", "confidence": "1" }, "conditions": [ @@ -1787,7 +1817,9 @@ "tags": { "type": "rfi", "crs_id": "931120", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "98", + "capec": "1000/152/175/253/193" }, "conditions": [ { @@ -1801,6 +1833,12 @@ }, { "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "^(?i:file|ftps?)://.*?\\?+$", @@ -1821,6 +1859,8 @@ "type": "command_injection", "crs_id": "932160", "category": "attack_attempt", + "cwe": "77", + "capec": "1000/152/248/88", "confidence": "1" }, "conditions": [ @@ -1838,6 +1878,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -2312,7 +2355,8 @@ } ], "transformers": [ - "lowercase" + "lowercase", + "cmdLine" ] }, { @@ -2322,6 +2366,8 @@ "type": "command_injection", "crs_id": "932171", "category": "attack_attempt", + "cwe": "77", + "capec": "1000/152/248/88", "confidence": "1" }, "conditions": [ @@ -2342,6 +2388,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "^\\(\\s*\\)\\s+{", @@ -2362,6 +2411,8 @@ "type": "command_injection", "crs_id": "932180", "category": "attack_attempt", + "cwe": "706", + "capec": "1000/225/122/17/177", "confidence": "1" }, "conditions": [ @@ -2421,6 +2472,8 @@ "type": "unrestricted_file_upload", "crs_id": "933111", "category": "attack_attempt", + "cwe": "434", + "capec": "1000/225/122/17/650", "confidence": "1" }, "conditions": [ @@ -2472,6 +2525,8 @@ "type": "php_code_injection", "crs_id": "933130", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650", "confidence": "1" }, "conditions": [ @@ -2489,6 +2544,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -2528,7 +2586,9 @@ "tags": { "type": "php_code_injection", "crs_id": "933131", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650" }, "conditions": [ { @@ -2545,6 +2605,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": 
"(?:HTTP_(?:ACCEPT(?:_(?:ENCODING|LANGUAGE|CHARSET))?|(?:X_FORWARDED_FO|REFERE)R|(?:USER_AGEN|HOS)T|CONNECTION|KEEP_ALIVE)|PATH_(?:TRANSLATED|INFO)|ORIG_PATH_INFO|QUERY_STRING|REQUEST_URI|AUTH_TYPE)", @@ -2565,6 +2628,8 @@ "type": "php_code_injection", "crs_id": "933140", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650", "confidence": "1" }, "conditions": [ @@ -2582,6 +2647,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "php://(?:std(?:in|out|err)|(?:in|out)put|fd|memory|temp|filter)", @@ -2601,6 +2669,8 @@ "type": "php_code_injection", "crs_id": "933150", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650", "confidence": "1" }, "conditions": [ @@ -2618,6 +2688,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -2680,7 +2753,9 @@ "tags": { "type": "php_code_injection", "crs_id": "933160", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650" }, "conditions": [ { @@ -2697,6 +2772,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:s(?:e(?:t(?:_(?:e(?:xception|rror)_handler|magic_quotes_runtime|include_path)|defaultstub)|ssion_s(?:et_save_handler|tart))|qlite_(?:(?:(?:unbuffered|single|array)_)?query|create_(?:aggregate|function)|p?open|exec)|tr(?:eam_(?:context_create|socket_client)|ipc?slashes|rev)|implexml_load_(?:string|file)|ocket_c(?:onnect|reate)|h(?:ow_sourc|a1_fil)e|pl_autoload_register|ystem)|p(?:r(?:eg_(?:replace(?:_callback(?:_array)?)?|match(?:_all)?|split)|oc_(?:(?:terminat|clos|nic)e|get_status|open)|int_r)|o(?:six_(?:get(?:(?:e[gu]|g)id|login|pwnam)|mk(?:fifo|nod)|ttyname|kill)|pen)|hp(?:_(?:strip_whitespac|unam)e|version|info)|g_(?:(?:execut|prepar)e|connect|query)|a(?:rse_(?:ini_file|str)|ssthru)|utenv)|r(?:unkit_(?:function_(?:re(?:defin|nam)e|copy|add)|method_(?:re(?:defin|nam)e|copy|add)|constant_(?:redefine|add))|e(?:(?:gister_(?:shutdown|tick)|name)_function|ad(?:(?:gz)?file|_exif_data|dir))|awurl(?:de|en)code)|i(?:mage(?:createfrom(?:(?:jpe|pn)g|x[bp]m|wbmp|gif)|(?:jpe|pn)g|g(?:d2?|if)|2?wbmp|xbm)|s_(?:(?:(?:execut|write?|read)ab|fi)le|dir)|ni_(?:get(?:_all)?|set)|terator_apply|ptcembed)|g(?:et(?:_(?:c(?:urrent_use|fg_va)r|meta_tags)|my(?:[gpu]id|inode)|(?:lastmo|cw)d|imagesize|env)|z(?:(?:(?:defla|wri)t|encod|fil)e|compress|open|read)|lob)|a(?:rray_(?:u(?:intersect(?:_u?assoc)?|diff(?:_u?assoc)?)|intersect_u(?:assoc|key)|diff_u(?:assoc|key)|filter|reduce|map)|ssert(?:_options)?|tob)|h(?:tml(?:specialchars(?:_decode)?|_entity_decode|entities)|(?:ash(?:_(?:update|hmac))?|ighlight)_file|e(?:ader_register_callback|x2bin))|f(?:i(?:le(?:(?:[acm]tim|inod)e|(?:_exist|perm)s|group)?|nfo_open)|tp_(?:nb_(?:ge|pu)|connec|ge|pu)t|(?:unction_exis|pu)ts|write|open)|o(?:b_(?:get_(?:c(?:ontents|lean)|flush)|end_(?:clean|flush)|clean|flush|start)|dbc_(?:result(?:_all)?|exec(?:ute)?|connect)|pendir)|m(?:b_(?:ereg(?:_(?:replace(?:_callback)?|match)|i(?:_replace)?)?|parse_str)|(?:ove_uploaded|d5)_file|ethod_exists|ysql_query|kdir)|e(?:x(?:if_(?:t(?:humbnail|agname)|imagetype|read_data)|ec)|scapeshell(?:arg|cmd)|rror_reporting|val)|c(?:url_(?:file_create|exec|init)|onvert_uuencode|reate_function|hr)|u(?:n(?:serialize|pack)|rl(?:de|en)code|[ak]?sort)|b(?:(?:son_(?:de|en)|ase64_en)code|zopen|toa)|(?:json_(?:de|en)cod|debug_backtrac|tmpfil)e|var_dump)(?:\\s|/\\*.*\\*/|//.
*|#.*|\\\"|')*\\((?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?,)*(?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?)?\\)", @@ -2717,6 +2795,8 @@ "type": "php_code_injection", "crs_id": "933170", "category": "attack_attempt", + "cwe": "502", + "capec": "1000/152/586", "confidence": "1" }, "conditions": [ @@ -2737,6 +2817,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "[oOcC]:\\d+:\\\".+?\\\":\\d+:{[\\W\\w]*}", @@ -2756,7 +2839,9 @@ "tags": { "type": "php_code_injection", "crs_id": "933200", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "502", + "capec": "1000/152/586" }, "conditions": [ { @@ -2773,6 +2858,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:(?:bzip|ssh)2|z(?:lib|ip)|(?:ph|r)ar|expect|glob|ogg)://", @@ -2794,7 +2882,9 @@ "tags": { "type": "js_code_injection", "crs_id": "934100", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" }, "conditions": [ { @@ -2811,6 +2901,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:(?:l(?:(?:utimes|chmod)(?:Sync)?|(?:stat|ink)Sync)|w(?:rite(?:(?:File|v)(?:Sync)?|Sync)|atchFile)|u(?:n(?:watchFile|linkSync)|times(?:Sync)?)|s(?:(?:ymlink|tat)Sync|pawn(?:File|Sync))|ex(?:ec(?:File(?:Sync)?|Sync)|istsSync)|a(?:ppendFile|ccess)(?:Sync)?|(?:Caveat|Inode)s|open(?:dir)?Sync|new\\s+Function|Availability|\\beval)\\s*\\(|m(?:ain(?:Module\\s*(?:\\W*\\s*(?:constructor|require)|\\[)|\\s*(?:\\W*\\s*(?:constructor|require)|\\[))|kd(?:temp(?:Sync)?|irSync)\\s*\\(|odule\\.exports\\s*=)|c(?:(?:(?:h(?:mod|own)|lose)Sync|reate(?:Write|Read)Stream|p(?:Sync)?)\\s*\\(|o(?:nstructor\\s*(?:\\W*\\s*_load|\\[)|pyFile(?:Sync)?\\s*\\())|f(?:(?:(?:s(?:(?:yncS)?|tatS)|datas(?:yncS)?)ync|ch(?:mod|own)(?:Sync)?)\\s*\\(|u(?:nction\\s*\\(\\s*\\)\\s*{|times(?:Sync)?\\s*\\())|r(?:e(?:(?:ad(?:(?:File|link|dir)?Sync|v(?:Sync)?)|nameSync)\\s*\\(|quire\\s*(?:\\W*\\s*main|\\[))|m(?:Sync)?\\s*\\()|process\\s*(?:\\W*\\s*(?:mainModule|binding)|\\[)|t(?:his\\.constructor|runcateSync\\s*\\()|_(?:\\$\\$ND_FUNC\\$\\$_|_js_function)|global\\s*(?:\\W*\\s*process|\\[)|String\\s*\\.\\s*fromCharCode|binding\\s*\\[)", @@ -2831,7 +2924,9 @@ "type": "js_code_injection", "crs_id": "934101", "category": "attack_attempt", - "confidence": "1" + "confidence": "1", + "cwe": "94", + "capec": "1000/152/242" }, "conditions": [ { @@ -2848,6 +2943,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:w(?:atch|rite)|(?:spaw|ope)n|exists|close|fork|read)\\s*\\(", @@ -2868,6 +2966,8 @@ "type": "xss", "crs_id": "941110", "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", "confidence": "1" }, "conditions": [ @@ -2897,6 +2997,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "]*>[\\s\\S]*?", @@ -2919,6 +3022,8 @@ "type": "xss", "crs_id": "941120", "category": "attack_attempt", + "cwe": "83", + 
"capec": "1000/152/242/63/591/243", "confidence": "1" }, "conditions": [ @@ -2948,9 +3053,12 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], - "regex": "[\\s\\\"'`;\\/0-9=\\x0B\\x09\\x0C\\x3B\\x2C\\x28\\x3B]on(?:d(?:r(?:ag(?:en(?:ter|d)|leave|start|over)?|op)|urationchange|blclick)|s(?:e(?:ek(?:ing|ed)|arch|lect)|u(?:spend|bmit)|talled|croll|how)|m(?:ouse(?:(?:lea|mo)ve|o(?:ver|ut)|enter|down|up)|essage)|p(?:a(?:ge(?:hide|show)|(?:st|us)e)|lay(?:ing)?|rogress)|c(?:anplay(?:through)?|o(?:ntextmenu|py)|hange|lick|ut)|a(?:nimation(?:iteration|start|end)|(?:fterprin|bor)t)|t(?:o(?:uch(?:cancel|start|move|end)|ggle)|imeupdate)|f(?:ullscreen(?:change|error)|ocus(?:out|in)?)|(?:(?:volume|hash)chang|o(?:ff|n)lin)e|b(?:efore(?:unload|print)|lur)|load(?:ed(?:meta)?data|start)?|r(?:es(?:ize|et)|atechange)|key(?:press|down|up)|w(?:aiting|heel)|in(?:valid|put)|e(?:nded|rror)|unload)[\\s\\x0B\\x09\\x0C\\x3B\\x2C\\x28\\x3B]*?=[^=]", + "regex": "\\bon(?:d(?:r(?:ag(?:en(?:ter|d)|leave|start|over)?|op)|urationchange|blclick)|s(?:e(?:ek(?:ing|ed)|arch|lect)|u(?:spend|bmit)|talled|croll|how)|m(?:ouse(?:(?:lea|mo)ve|o(?:ver|ut)|enter|down|up)|essage)|p(?:a(?:ge(?:hide|show)|(?:st|us)e)|lay(?:ing)?|rogress|aste|ointer(?:cancel|down|enter|leave|move|out|over|rawupdate|up))|c(?:anplay(?:through)?|o(?:ntextmenu|py)|hange|lick|ut)|a(?:nimation(?:iteration|start|end)|(?:fterprin|bor)t|uxclick|fterscriptexecute)|t(?:o(?:uch(?:cancel|start|move|end)|ggle)|imeupdate)|f(?:ullscreen(?:change|error)|ocus(?:out|in)?|inish)|(?:(?:volume|hash)chang|o(?:ff|n)lin)e|b(?:efore(?:unload|print)|lur)|load(?:ed(?:meta)?data|start|end)?|r(?:es(?:ize|et)|atechange)|key(?:press|down|up)|w(?:aiting|heel)|in(?:valid|put)|e(?:nded|rror)|unload)[\\s\\x0B\\x09\\x0C\\x3B\\x2C\\x28\\x3B]*?=[^=]", "options": { "min_length": 8 } @@ -2970,6 +3078,8 @@ "type": "xss", "crs_id": "941140", "category": "attack_attempt", + "cwe": "84", + "capec": "1000/152/242/63/591/244", "confidence": "1" }, "conditions": [ @@ -2999,6 +3109,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "[a-z]+=(?:[^:=]+:.+;)*?[^:=]+:url\\(javascript", @@ -3021,6 +3134,8 @@ "type": "xss", "crs_id": "941170", "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", "confidence": "1" }, "conditions": [ @@ -3047,6 +3162,12 @@ }, { "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:\\W|^)(?:javascript:(?:[\\s\\S]+[=\\x5c\\(\\[\\.<]|[\\s\\S]*?(?:\\bname\\b|\\x5c[ux]\\d)))|@\\W*?i\\W*?m\\W*?p\\W*?o\\W*?r\\W*?t\\W*?(?:/\\*[\\s\\S]*?)?(?:[\\\"']|\\W*?u\\W*?r\\W*?l[\\s\\S]*?\\()|[^-]*?-\\W*?m\\W*?o\\W*?z\\W*?-\\W*?b\\W*?i\\W*?n\\W*?d\\W*?i\\W*?n\\W*?g[^:]*?:\\W*?u\\W*?r\\W*?l[\\s\\S]*?\\(", @@ -3068,7 +3189,9 @@ "tags": { "type": "xss", "crs_id": "941180", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "79", + "capec": "1000/152/242/63/591" }, "conditions": [ { @@ -3085,6 +3208,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -3111,6 +3237,8 @@ "type": "xss", "crs_id": "941200", "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", "confidence": "1" }, "conditions": [ @@ -3128,6 +3256,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": 
"(?i:<.*[:]?vmlframe.*?[\\s/+]*?src[\\s/+]*=)", @@ -3150,6 +3281,8 @@ "type": "xss", "crs_id": "941210", "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", "confidence": "1" }, "conditions": [ @@ -3167,6 +3300,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:(?:j|&#x?0*(?:74|4A|106|6A);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:a|&#x?0*(?:65|41|97|61);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:v|&#x?0*(?:86|56|118|76);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:a|&#x?0*(?:65|41|97|61);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:s|&#x?0*(?:83|53|115|73);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:c|&#x?0*(?:67|43|99|63);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:r|&#x?0*(?:82|52|114|72);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:i|&#x?0*(?:73|49|105|69);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:p|&#x?0*(?:80|50|112|70);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:t|&#x?0*(?:84|54|116|74);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?::|&(?:#x?0*(?:58|3A);?|colon;)).)", @@ -3189,6 +3325,8 @@ "type": "xss", "crs_id": "941220", "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", "confidence": "1" }, "conditions": [ @@ -3206,6 +3344,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:(?:v|&#x?0*(?:86|56|118|76);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:b|&#x?0*(?:66|42|98|62);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:s|&#x?0*(?:83|53|115|73);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:c|&#x?0*(?:67|43|99|63);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:r|&#x?0*(?:82|52|114|72);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:i|&#x?0*(?:73|49|105|69);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:p|&#x?0*(?:80|50|112|70);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:t|&#x?0*(?:84|54|116|74);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?::|&(?:#x?0*(?:58|3A);?|colon;)).)", @@ -3228,6 +3369,8 @@ "type": "xss", "crs_id": "941230", "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", "confidence": "1" }, "conditions": [ @@ -3245,6 +3388,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "]", @@ -3419,6 +3585,8 @@ "type": "xss", "crs_id": "941300", "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", "confidence": "1" }, "conditions": [ @@ -3436,6 +3604,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": ")|<.*\\+AD4-", @@ -3493,7 +3669,9 @@ "tags": { "type": "xss", "crs_id": "941360", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "87", + "capec": "1000/152/242/63/591/199" }, "conditions": [ { @@ -3510,6 +3688,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "![!+ ]\\[\\]", @@ -3530,7 +3711,9 @@ "type": "xss", "crs_id": "941390", "category": "attack_attempt", - "confidence": "1" + "confidence": "1", + "cwe": "79", + "capec": "1000/152/242/63/591" }, "conditions": [ { @@ -3547,6 +3730,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": 
"graphql.server.all_resolvers" } ], "regex": "\\b(?i:eval|settimeout|setinterval|new\\s+Function|alert|prompt)[\\s+]*\\([^\\)]", @@ -3566,7 +3752,9 @@ "tags": { "type": "sql_injection", "crs_id": "942100", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" }, "conditions": [ { @@ -3583,6 +3771,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ] }, @@ -3600,6 +3791,8 @@ "type": "sql_injection", "crs_id": "942160", "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/7", "confidence": "1" }, "conditions": [ @@ -3617,6 +3810,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:sleep\\(\\s*?\\d*?\\s*?\\)|benchmark\\(.*?\\,.*?\\))", @@ -3637,6 +3833,8 @@ "type": "sql_injection", "crs_id": "942240", "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/7", "confidence": "1" }, "conditions": [ @@ -3654,6 +3852,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:[\\\"'`](?:;*?\\s*?waitfor\\s+(?:delay|time)\\s+[\\\"'`]|;.*?:\\s*?goto)|alter\\s*?\\w+.*?cha(?:racte)?r\\s+set\\s+\\w+)", @@ -3672,7 +3873,9 @@ "tags": { "type": "sql_injection", "crs_id": "942250", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" }, "conditions": [ { @@ -3689,6 +3892,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:merge.*?using\\s*?\\(|execute\\s*?immediate\\s*?[\\\"'`]|match\\s*?[\\w(?:),+-]+\\s*?against\\s*?\\()", @@ -3708,7 +3914,9 @@ "tags": { "type": "sql_injection", "crs_id": "942270", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" }, "conditions": [ { @@ -3725,6 +3933,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "union.*?select.*?from", @@ -3744,6 +3955,8 @@ "type": "sql_injection", "crs_id": "942280", "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/7", "confidence": "1" }, "conditions": [ @@ -3761,6 +3974,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:;\\s*?shutdown\\s*?(?:[#;{]|\\/\\*|--)|waitfor\\s*?delay\\s?[\\\"'`]+\\s?\\d|select\\s*?pg_sleep)", @@ -3779,7 +3995,9 @@ "tags": { "type": "nosql_injection", "crs_id": "942290", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "943", + "capec": "1000/152/248/676" }, "conditions": [ { @@ -3796,6 +4014,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:(?:\\[?\\$(?:(?:s(?:lic|iz)|wher)e|e(?:lemMatch|xists|q)|n(?:o[rt]|in?|e)|l(?:ike|te?)|t(?:ext|ype)|a(?:ll|nd)|jsonSchema|between|regex|x?or|div|mod)\\]?)\\b)", @@ -3817,7 +4038,9 @@ "tags": { "type": "sql_injection", "crs_id": "942360", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/470" }, "conditions": [ { @@ -3834,6 +4057,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": 
"(?:^[\\W\\d]+\\s*?(?:alter\\s*(?:a(?:(?:pplication\\s*rol|ggregat)e|s(?:ymmetric\\s*ke|sembl)y|u(?:thorization|dit)|vailability\\s*group)|c(?:r(?:yptographic\\s*provider|edential)|o(?:l(?:latio|um)|nversio)n|ertificate|luster)|s(?:e(?:rv(?:ice|er)|curity|quence|ssion|arch)|y(?:mmetric\\s*key|nonym)|togroup|chema)|m(?:a(?:s(?:ter\\s*key|k)|terialized)|e(?:ssage\\s*type|thod)|odule)|l(?:o(?:g(?:file\\s*group|in)|ckdown)|a(?:ngua|r)ge|ibrary)|t(?:(?:abl(?:espac)?|yp)e|r(?:igger|usted)|hreshold|ext)|p(?:a(?:rtition|ckage)|ro(?:cedur|fil)e|ermission)|d(?:i(?:mension|skgroup)|atabase|efault|omain)|r(?:o(?:l(?:lback|e)|ute)|e(?:sourc|mot)e)|f(?:u(?:lltext|nction)|lashback|oreign)|e(?:xte(?:nsion|rnal)|(?:ndpoi|ve)nt)|in(?:dex(?:type)?|memory|stance)|b(?:roker\\s*priority|ufferpool)|x(?:ml\\s*schema|srobject)|w(?:ork(?:load)?|rapper)|hi(?:erarchy|stogram)|o(?:perator|utline)|(?:nicknam|queu)e|us(?:age|er)|group|java|view)|union\\s*(?:(?:distin|sele)ct|all))\\b|\\b(?:(?:(?:trunc|cre|upd)at|renam)e|(?:inser|selec)t|de(?:lete|sc)|alter|load)\\s+(?:group_concat|load_file|char)\\b\\s*\\(?|[\\s(]load_file\\s*?\\(|[\\\"'`]\\s+regexp\\W)", @@ -3852,7 +4078,9 @@ "tags": { "type": "sql_injection", "crs_id": "942500", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" }, "conditions": [ { @@ -3869,6 +4097,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:/\\*[!+](?:[\\w\\s=_\\-(?:)]+)?\\*/)", @@ -3889,6 +4120,8 @@ "type": "http_protocol_violation", "crs_id": "943100", "category": "attack_attempt", + "cwe": "384", + "capec": "1000/225/21/593/61", "confidence": "1" }, "conditions": [ @@ -3903,6 +4136,12 @@ }, { "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i:\\.cookie\\b.*?;\\W*?(?:expires|domain)\\W*?=|\\bhttp-equiv\\W+set-cookie\\b)", @@ -3923,6 +4162,8 @@ "type": "java_code_injection", "crs_id": "944100", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -3943,6 +4184,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "java\\.lang\\.(?:runtime|processbuilder)", @@ -3964,7 +4208,9 @@ "tags": { "type": "java_code_injection", "crs_id": "944110", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" }, "conditions": [ { @@ -3984,6 +4230,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:runtime|processbuilder)", @@ -4011,6 +4260,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:unmarshaller|base64data|java\\.)", @@ -4032,7 +4284,9 @@ "tags": { "type": "java_code_injection", "crs_id": "944130", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" }, "conditions": [ { @@ -4052,6 +4306,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "list": [ @@ -4112,6 +4369,8 @@ "type": "java_code_injection", "crs_id": "944260", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -4132,6 +4391,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], 
"regex": "(?:class\\.module\\.classLoader\\.resources\\.context\\.parent\\.pipeline|springframework\\.context\\.support\\.FileSystemXmlApplicationContext)", @@ -4150,7 +4412,9 @@ "name": "Look for Cassandra injections", "tags": { "type": "nosql_injection", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "943", + "capec": "1000/152/248/676" }, "conditions": [ { @@ -4165,6 +4429,12 @@ { "address": "server.request.path_params" }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, { "address": "server.request.headers.no_cookies" } @@ -4183,7 +4453,9 @@ "name": "OGNL - Look for formatting injection patterns", "tags": { "type": "java_code_injection", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" }, "conditions": [ { @@ -4204,6 +4476,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "[#%$]{(?:[^}]+[^\\w\\s}\\-_][^}]+|\\d+-\\d+)}", @@ -4221,6 +4496,8 @@ "tags": { "type": "java_code_injection", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -4242,6 +4519,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "[@#]ognl", @@ -4259,6 +4539,8 @@ "tags": { "type": "exploit_detection", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -4287,6 +4569,8 @@ "tags": { "type": "js_code_injection", "category": "attack_attempt", + "cwe": "1321", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -4315,6 +4599,8 @@ "tags": { "type": "js_code_injection", "category": "attack_attempt", + "cwe": "1321", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -4357,6 +4643,8 @@ "tags": { "type": "java_code_injection", "category": "attack_attempt", + "cwe": "1336", + "capec": "1000/152/242/19", "confidence": "1" }, "conditions": [ @@ -4377,6 +4665,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "#(?:set|foreach|macro|parse|if)\\(.*\\)|<#assign.*>" @@ -4393,6 +4684,8 @@ "type": "attack_tool", "category": "attack_attempt", "tool_name": "BurpCollaborator", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -4413,6 +4706,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:burpcollaborator\\.net|oastify\\.com)\\b" @@ -4429,6 +4725,8 @@ "type": "commercial_scanner", "category": "attack_attempt", "tool_name": "Qualys", + "cwe": "200", + "capec": "1000/118/169", "confidence": "0" }, "conditions": [ @@ -4449,6 +4747,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\bqualysperiscope\\.com\\b" @@ -4465,6 +4766,8 @@ "type": "commercial_scanner", "category": "attack_attempt", "tool_name": "Probely", + "cwe": "200", + "capec": "1000/118/169", "confidence": "0" }, "conditions": [ @@ -4485,6 +4788,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\bprbly\\.win\\b" @@ -4500,6 +4806,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -4520,6 +4828,9 @@ }, { "address": "grpc.server.request.message" + }, + { + 
"address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:webhook\\.site|\\.canarytokens\\.com|vii\\.one|act1on3\\.ru|gdsburp\\.com)\\b" @@ -4535,6 +4846,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "0" }, "conditions": [ @@ -4555,6 +4868,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:\\.ngrok\\.io|requestbin\\.com|requestbin\\.net)\\b" @@ -4571,6 +4887,8 @@ "type": "commercial_scanner", "category": "attack_attempt", "tool_name": "Rapid7", + "cwe": "200", + "capec": "1000/118/169", "confidence": "0" }, "conditions": [ @@ -4591,6 +4909,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\bappspidered\\.rapid7\\." @@ -4607,6 +4928,8 @@ "type": "attack_tool", "category": "attack_attempt", "tool_name": "interact.sh", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -4627,6 +4950,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\b(?:interact\\.sh|oast\\.(?:pro|live|site|online|fun|me))\\b" @@ -4636,12 +4962,59 @@ ], "transformers": [] }, + { + "id": "dog-913-008", + "name": "Netsparker OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Netsparker", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "\\b(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)r87(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)(?:me|com)\\b", + "options": { + "case_sensitive": false, + "min_length": 7 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, { "id": "dog-931-001", "name": "RFI: URL Payload to well known RFI target", "tags": { "type": "rfi", "category": "attack_attempt", + "cwe": "98", + "capec": "1000/152/175/253/193", "confidence": "1" }, "conditions": [ @@ -4656,6 +5029,12 @@ }, { "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "^(?i:file|ftps?|https?).*/rfiinc\\.txt\\?+$", @@ -4675,6 +5054,8 @@ "tags": { "type": "xxe", "category": "attack_attempt", + "cwe": "91", + "capec": "1000/152/248/250", "confidence": "0" }, "conditions": [ @@ -4686,6 +5067,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?:<\\?xml[^>]*>.*)]+SYSTEM\\s+[^>]+>", @@ -4699,12 +5083,69 @@ ], "transformers": [] }, + { + "id": "dog-941-001", + "name": "XSS in source property", + "tags": { + "type": "xss", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "referer" + ] + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": 
"grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "<(?:iframe|esi:include)(?:(?:\\s|/)*\\w+=[\"'\\w]+)*(?:\\s|/)*src(?:doc)?=[\"']?(?:data:|javascript:|http:|//)[^\\s'\"]+['\"]?", + "options": { + "min_length": 14 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls", + "urlDecodeUni" + ] + }, { "id": "dog-942-001", "name": "Blind XSS callback domains", "tags": { "type": "xss", "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", "confidence": "1" }, "conditions": [ @@ -4725,6 +5166,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "https?:\\/\\/(?:.*\\.)?(?:bxss\\.in|xss\\.ht|js\\.rip)", @@ -4743,6 +5187,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -4978,6 +5424,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5018,6 +5466,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5058,6 +5508,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5098,6 +5550,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5138,6 +5592,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5178,6 +5634,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5218,6 +5676,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5258,6 +5718,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5298,6 +5760,8 @@ "tags": { "type": "ssrf", "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", "confidence": "1" }, "conditions": [ @@ -5315,6 +5779,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i)^\\W*((http|ftp)s?://)?\\W*((::f{4}:)?(169|(0x)?0*a9|0+251)\\.?(254|(0x)?0*fe|0+376)[0-9a-fx\\.:]+|metadata\\.google\\.internal|metadata\\.goog)\\W*/", @@ -5334,7 +5801,9 @@ "name": "Server-side Javascript injection: Try to detect obvious JS injection", "tags": { "type": "js_code_injection", - "category": "attack_attempt" + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" }, "conditions": [ { @@ -5351,6 +5820,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "require\\(['\"][\\w\\.]+['\"]\\)|process\\.\\w+\\([\\w\\.]*\\)|\\.toString\\(\\)", @@ -5371,6 +5843,8 @@ "tags": { "type": "command_injection", "category": "attack_attempt", + "cwe": "78", + "capec": "1000/152/248/88", "confidence": "1" }, "conditions": [ @@ -5391,6 +5865,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": 
"(?i)[&|]\\s*type\\s+%\\w+%\\\\+\\w+\\.ini\\s*[&|]" @@ -5406,6 +5883,8 @@ "tags": { "type": "command_injection", "category": "attack_attempt", + "cwe": "78", + "capec": "1000/152/248/88", "confidence": "1" }, "conditions": [ @@ -5426,14 +5905,19 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], - "regex": "(?i)[&|]\\s*cat\\s+\\/etc\\/[\\w\\.\\/]*passwd\\s*[&|]" + "regex": "(?i)[&|]\\s*cat\\s*\\/etc\\/[\\w\\.\\/]*passwd\\s*[&|]" }, "operator": "match_regex" } ], - "transformers": [] + "transformers": [ + "cmdLine" + ] }, { "id": "sqr-000-010", @@ -5441,6 +5925,8 @@ "tags": { "type": "command_injection", "category": "attack_attempt", + "cwe": "78", + "capec": "1000/152/248/88", "confidence": "1" }, "conditions": [ @@ -5461,6 +5947,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(?i)[&|]\\s*timeout\\s+/t\\s+\\d+\\s*[&|]" @@ -5476,6 +5965,8 @@ "tags": { "type": "ssrf", "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", "confidence": "1" }, "conditions": [ @@ -5493,6 +5984,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "http(s?):\\/\\/([A-Za-z0-9\\.\\-\\_]+|\\[[A-Fa-f0-9\\:]+\\]|):5986\\/wsman", @@ -5511,6 +6005,8 @@ "tags": { "type": "ssrf", "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", "confidence": "0" }, "conditions": [ @@ -5528,6 +6024,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "^(jar:)?(http|https):\\/\\/([0-9oq]{1,5}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}|[0-9]{1,10})(:[0-9]{1,5})?(\\/[^:@]*)?$" @@ -5545,6 +6044,8 @@ "tags": { "type": "ssrf", "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", "confidence": "0" }, "conditions": [ @@ -5562,6 +6063,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "^(jar:)?(http|https):\\/\\/((\\[)?[:0-9a-f\\.x]{2,}(\\])?)(:[0-9]{1,5})?(\\/[^:@]*)?$" @@ -5579,6 +6083,8 @@ "tags": { "type": "ssrf", "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", "confidence": "1" }, "conditions": [ @@ -5599,6 +6105,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "(http|https):\\/\\/(?:.*\\.)?(?:burpcollaborator\\.net|localtest\\.me|mail\\.ebc\\.apple\\.com|bugbounty\\.dod\\.network|.*\\.[nx]ip\\.io|oastify\\.com|oast\\.(?:pro|live|site|online|fun|me)|sslip\\.io|requestbin\\.com|requestbin\\.net|hookbin\\.com|webhook\\.site|canarytokens\\.com|interact\\.sh|ngrok\\.io|bugbounty\\.click|prbly\\.win|qualysperiscope\\.com|vii.one|act1on3.ru)" @@ -5614,6 +6123,8 @@ "tags": { "type": "ssrf", "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", "confidence": "0" }, "conditions": [ @@ -5634,6 +6145,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "^(jar:)?((file|netdoc):\\/\\/[\\\\\\/]+|(dict|gopher|ldap|sftp|tftp):\\/\\/.*:[0-9]{1,5})" @@ -5651,6 +6165,8 @@ "tags": { "type": "exploit_detection", "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", "confidence": "1" }, "conditions": [ @@ -5674,6 +6190,9 @@ }, { "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" } ], "regex": "\\${[^j]*j[^n]*n[^d]*d[^i]*i[^:]*:[^}]*}" 
@@ -5691,6 +6210,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Joomla exploitation tool", "confidence": "1" }, @@ -5718,6 +6239,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nessus", "confidence": "1" }, @@ -5745,6 +6268,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Arachni", "confidence": "1" }, @@ -5772,6 +6297,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Jorgee", "confidence": "1" }, @@ -5799,6 +6326,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Probely", "confidence": "0" }, @@ -5826,6 +6355,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Metis", "confidence": "1" }, @@ -5853,6 +6384,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "SQLPowerInjector", "confidence": "1" }, @@ -5880,6 +6413,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "N-Stealth", "confidence": "1" }, @@ -5907,6 +6442,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Brutus", "confidence": "1" }, @@ -5934,6 +6471,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -5960,6 +6499,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Netsparker", "confidence": "0" }, @@ -5987,6 +6528,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "JAASCois", "confidence": "1" }, @@ -6014,6 +6557,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nsauditor", "confidence": "1" }, @@ -6041,6 +6586,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Paros", "confidence": "1" }, @@ -6068,6 +6615,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "DirBuster", "confidence": "1" }, @@ -6095,6 +6644,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Pangolin", "confidence": "1" }, @@ -6122,6 +6673,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Qualys", "confidence": "0" }, @@ -6149,6 +6702,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "SQLNinja", "confidence": "1" }, @@ -6176,6 +6731,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nikto", "confidence": "1" }, @@ -6203,6 +6760,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "BlackWidow", "confidence": "1" }, @@ -6230,6 +6789,8 @@ "tags": { "type": 
"attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Grendel-Scan", "confidence": "1" }, @@ -6257,6 +6818,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Havij", "confidence": "1" }, @@ -6284,6 +6847,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "w3af", "confidence": "1" }, @@ -6311,6 +6876,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nmap", "confidence": "1" }, @@ -6338,6 +6905,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nessus", "confidence": "1" }, @@ -6365,6 +6934,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "EvilScanner", "confidence": "1" }, @@ -6392,6 +6963,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "WebFuck", "confidence": "1" }, @@ -6419,6 +6992,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "OpenVAS", "confidence": "1" }, @@ -6446,6 +7021,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Spider-Pig", "confidence": "1" }, @@ -6473,6 +7050,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Zgrab", "confidence": "1" }, @@ -6500,6 +7079,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Zmeu", "confidence": "1" }, @@ -6527,6 +7108,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "GoogleSecurityScanner", "confidence": "0" }, @@ -6554,6 +7137,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Commix", "confidence": "1" }, @@ -6581,6 +7166,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Gobuster", "confidence": "1" }, @@ -6608,6 +7195,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "CGIchk", "confidence": "1" }, @@ -6635,6 +7224,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "FFUF", "confidence": "1" }, @@ -6662,6 +7253,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nuclei", "confidence": "1" }, @@ -6689,6 +7282,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Tsunami", "confidence": "1" }, @@ -6716,6 +7311,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Nimbostratus", "confidence": "1" }, @@ -6743,6 +7340,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Datadog Canary Test", "confidence": "1" }, @@ -6776,6 +7375,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": 
"200", + "capec": "1000/118/169", "tool_name": "Datadog Canary Test", "confidence": "1" }, @@ -6812,6 +7413,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "AlertLogic", "confidence": "0" }, @@ -6839,6 +7442,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "wfuzz", "confidence": "1" }, @@ -6866,6 +7471,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Detectify", "confidence": "0" }, @@ -6893,6 +7500,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "BSQLBF", "confidence": "1" }, @@ -6920,6 +7529,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "masscan", "confidence": "1" }, @@ -6947,6 +7558,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "WPScan", "confidence": "1" }, @@ -6974,6 +7587,8 @@ "tags": { "type": "commercial_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Aon", "confidence": "0" }, @@ -7001,6 +7616,8 @@ "tags": { "type": "security_scanner", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "confidence": "1" }, "conditions": [ @@ -7014,7 +7631,10 @@ ] } ], - "regex": "mozilla/4\\.0 \\(compatible(; msie 6\\.0; win32)?\\)" + "regex": "mozilla/4\\.0 \\(compatible(; msie (?:6\\.0; win32|4\\.0; Windows NT))?\\)", + "options": { + "case_sensitive": false + } }, "operator": "match_regex" } @@ -7027,6 +7647,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "SQLmap", "confidence": "1" }, @@ -7054,6 +7676,8 @@ "tags": { "type": "attack_tool", "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", "tool_name": "Skipfish", "confidence": "1" }, @@ -7074,32 +7698,6 @@ } ], "transformers": [] - }, - { - "id": "__troubleshooting_rule", - "name": "troubleshooting rule for block on request body", - "tags": { - "type": "troubleshooting", - "category": "troubleshooting", - "confidence": "1" - }, - "conditions": [ - { - "parameters": { - "inputs": [ - { - "address": "server.request.body" - } - ], - "regex": "ADKMFFpndcwHNnr2MW9W" - }, - "operator": "match_regex" - } - ], - "transformers": [], - "on_match": [ - "block" - ] } ] -} +} \ No newline at end of file diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecModuleSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecModuleSpecification.groovy index 6904d032647..02fdd645276 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecModuleSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecModuleSpecification.groovy @@ -1,7 +1,6 @@ package com.datadog.appsec import com.datadog.appsec.event.ChangeableFlow -import com.datadog.appsec.event.EventType import com.datadog.appsec.event.OrderedCallback import com.datadog.appsec.event.data.Address import com.datadog.appsec.event.data.DataBundle @@ -25,19 +24,6 @@ class AppSecModuleSpecification extends DDSpecification { list == [ds3, ds1, ds2] } - void 'event subscriptions are correctly ordered'() { - def es1 = new NoopEventSubscription('es1', EventType.REQUEST_START, DEFAULT) - def 
es2 = new NoopEventSubscription('es2', EventType.REQUEST_START, HIGH) - def es3 = new NoopEventSubscription('es3', EventType.REQUEST_START, HIGH) - - when: - def list = [es3, es1, es2] - list.sort(OrderedCallback.CallbackPriorityComparator.INSTANCE) - - then: - list == [es3, es2, es1] - } - private static class NoopDataSubscription extends AppSecModule.DataSubscription { final String name @@ -55,22 +41,4 @@ class AppSecModuleSpecification extends DDSpecification { name } } - - private static class NoopEventSubscription extends AppSecModule.EventSubscription { - final String name - - NoopEventSubscription(String name, EventType eventType, Priority priority) { - super(eventType, priority) - this.name = name - } - - @Override - void onEvent(AppSecRequestContext ctx, EventType eventType) { - } - - @Override - String toString() { - name - } - } } diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecSystemSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecSystemSpecification.groovy index 76f19bd2476..12a9bd7c5db 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecSystemSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/AppSecSystemSpecification.groovy @@ -3,7 +3,7 @@ package com.datadog.appsec import com.datadog.appsec.config.AppSecConfig import com.datadog.appsec.event.EventProducerService import com.datadog.appsec.gateway.AppSecRequestContext -import com.datadog.appsec.report.raw.events.AppSecEvent100 +import com.datadog.appsec.report.AppSecEvent import com.datadog.appsec.util.AbortStartupException import datadog.communication.ddagent.DDAgentFeaturesDiscovery import datadog.communication.ddagent.SharedCommunicationObjects @@ -77,7 +77,7 @@ class AppSecSystemSpecification extends DDSpecification { 1 * subService.registerCallback(EVENTS.requestEnded(), _) >> { requestEndedCB = it[1]; null } 1 * requestContext.getData(RequestContextSlot.APPSEC) >> appSecReqCtx 1 * requestContext.traceSegment >> traceSegment - 1 * appSecReqCtx.transferCollectedEvents() >> [Mock(AppSecEvent100)] + 1 * appSecReqCtx.transferCollectedEvents() >> [Mock(AppSecEvent)] 1 * appSecReqCtx.getRequestHeaders() >> ['foo-bar': ['1.1.1.1']] 1 * appSecReqCtx.getResponseHeaders() >> [:] 1 * traceSegment.setTagTop('actor.ip', '1.1.1.1') @@ -105,7 +105,7 @@ class AppSecSystemSpecification extends DDSpecification { 1 * subService.registerCallback(EVENTS.requestEnded(), _) >> { requestEndedCB = it[1]; null } 7 * requestContext.getData(RequestContextSlot.APPSEC) >> appSecReqCtx 7 * requestContext.traceSegment >> traceSegment - 7 * appSecReqCtx.transferCollectedEvents() >> [Mock(AppSecEvent100)] + 7 * appSecReqCtx.transferCollectedEvents() >> [Mock(AppSecEvent)] // allow for one extra in case we move to another second and round down the prev count (5..6) * appSecReqCtx.getRequestHeaders() >> [:] (5..6) * appSecReqCtx.getResponseHeaders() >> [:] diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/api/security/ApiSecurityRequestSamplerTest.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/api/security/ApiSecurityRequestSamplerTest.groovy new file mode 100644 index 00000000000..b15a22e1c47 --- /dev/null +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/api/security/ApiSecurityRequestSamplerTest.groovy @@ -0,0 +1,43 @@ +package com.datadog.appsec.api.security + +import datadog.trace.api.Config +import datadog.trace.test.util.DDSpecification +import spock.lang.Shared + +class 
ApiSecurityRequestSamplerTest extends DDSpecification { + + @Shared + static final float DEFAULT_SAMPLE_RATE = Config.get().getApiSecurityRequestSampleRate() + + void 'Api Security Request Sample Rate'() { + given: + def config = Spy(Config.get()) + config.getApiSecurityRequestSampleRate() >> sampleRate + def sampler = new ApiSecurityRequestSampler(config) + + when: + def numOfRequest = expectedSampledRequests.size() + def results = new int[numOfRequest] + for (int i = 0; i < numOfRequest; i++) { + results[i] = sampler.sampleRequest() ? 1 : 0 + } + + then: + results == expectedSampledRequests as int[] + + where: + sampleRate | expectedSampledRequests + DEFAULT_SAMPLE_RATE | [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0] // Default sample rate - 10% + 0.0 | [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + 0.1 | [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0] + 0.25 | [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1] + 0.33 | [0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1] + 0.5 | [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1] + 0.75 | [0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1] + 0.9 | [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0] + 0.99 | [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + 1.0 | [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + 1.25 | [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] // Wrong sample rate - use 100% + -0.5 | [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] // Wrong sample rate - use 100% + } +} diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/blocking/BlockingServiceImplSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/blocking/BlockingServiceImplSpecification.groovy index 705db033676..314fa037196 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/blocking/BlockingServiceImplSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/blocking/BlockingServiceImplSpecification.groovy @@ -108,7 +108,7 @@ class BlockingServiceImplSpecification extends DDSpecification { then: res == true - 1 * brf.tryCommitBlockingResponse(405, BlockingContentType.HTML, [:]) >> true + 1 * brf.tryCommitBlockingResponse(mts, 405, BlockingContentType.HTML, [:],) >> true 1 * mts.effectivelyBlocked() } diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigDeserializerSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigDeserializerSpecification.groovy new file mode 100644 index 00000000000..52775d4acb2 --- /dev/null +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigDeserializerSpecification.groovy @@ -0,0 +1,61 @@ +package com.datadog.appsec.config + +import spock.lang.Specification + +import java.nio.charset.StandardCharsets + +class AppSecConfigDeserializerSpecification extends Specification { + + void "deserialize rule with unknown key"() { + given: + final deser = AppSecConfigDeserializer.INSTANCE + final input = """ + { + "version": "2.9999", + "metadata": { + "rules_version": "1.7.1" + }, + "exclusions": [ + { + "UNKNOWN_FIELD": "UNKNOWN_VALUE" + } + ], + "rules": [ + { + "UNKNOWN_FIELD": "UNKNOWN_VALUE", + "id": "blk-001-001", + "name": "Block IP Addresses", + "tags": { + "type": "block_ip", + "category": "security_response" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "http.client_ip" + } + ], + "data": "blocked_ips" + }, + "operator": "ip_match" + } + ], + "transformers": [], + "on_match": [ + "block" + ] + } + ] + } + """ + + when: + def result = 
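Note on the sampling table above: the expected 0/1 patterns are consistent with a cumulative accumulator that adds the sample rate on every request and fires whenever the running total reaches 1, with out-of-range rates falling back to sampling everything. The following self-contained Groovy sketch illustrates that idea; it is an assumption for illustration, not the tracer's actual ApiSecurityRequestSampler implementation.

// Illustrative only: a cumulative sampler that reproduces rows such as 0.25 and 0.5 in the table above.
class CumulativeSampler {
  private final double rate
  private double accumulator = 0d

  CumulativeSampler(double requestedRate) {
    // out-of-range rates fall back to sampling every request, as in the "wrong sample rate" rows
    this.rate = (requestedRate < 0d || requestedRate > 1d) ? 1d : requestedRate
  }

  boolean sampleRequest() {
    accumulator += rate
    if (accumulator >= 1d) {
      accumulator -= 1d
      return true
    }
    return false
  }
}

def sampled = { double rate, int n ->
  def sampler = new CumulativeSampler(rate)
  (1..n).collect { sampler.sampleRequest() ? 1 : 0 }
}

assert sampled(0.25d, 8) == [0, 0, 0, 1, 0, 0, 0, 1]
assert sampled(0.5d, 8)  == [0, 1, 0, 1, 0, 1, 0, 1]
assert sampled(-0.5d, 4) == [1, 1, 1, 1]   // invalid rate treated as 100%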
deser.deserialize(input.getBytes(StandardCharsets.UTF_8)) + + then: + result != null + result.getNumberOfRules() == 1 + } +} diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigServiceImplSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigServiceImplSpecification.groovy index 4f8559e3760..439365faac0 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigServiceImplSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/config/AppSecConfigServiceImplSpecification.groovy @@ -213,7 +213,7 @@ class AppSecConfigServiceImplSpecification extends DDSpecification { } 1 * poller.addConfigurationEndListener(_) >> { listeners.savedConfEndListener = it[0] } 1 * poller.addCapabilities(2L) - 1 * poller.addCapabilities(956L) + 1 * poller.addCapabilities(1980L) 0 * _._ initialWafConfig.get() != null @@ -350,7 +350,7 @@ class AppSecConfigServiceImplSpecification extends DDSpecification { } 1 * poller.addConfigurationEndListener(_) >> { listeners.savedConfEndListener = it[0] } 1 * poller.addCapabilities(2L) - 1 * poller.addCapabilities(956L) + 1 * poller.addCapabilities(1980L) 0 * _._ when: @@ -406,7 +406,7 @@ class AppSecConfigServiceImplSpecification extends DDSpecification { poller = null then: - 1 * poller.removeCapabilities(958L) + 1 * poller.removeCapabilities(1982L) 4 * poller.removeListener(_) 1 * poller.removeConfigurationEndListener(_) 1 * poller.stop() diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/EventDispatcherSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/EventDispatcherSpecification.groovy index e5e2def08a1..5cbcf9c6614 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/EventDispatcherSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/EventDispatcherSpecification.groovy @@ -9,35 +9,10 @@ import datadog.appsec.api.blocking.BlockingContentType import datadog.trace.api.gateway.Flow import datadog.trace.test.util.DDSpecification -import static org.hamcrest.Matchers.containsInAnyOrder -import static spock.util.matcher.HamcrestSupport.expect - class EventDispatcherSpecification extends DDSpecification { EventDispatcher dispatcher = new EventDispatcher() AppSecRequestContext ctx = Mock() - void 'notifies about events in order'() { - given: - EventListener eventListener1 = Mock() - EventListener eventListener2 = Mock() - eventListener1.priority >> OrderedCallback.Priority.DEFAULT - eventListener2.priority >> OrderedCallback.Priority.HIGH - - def set = new EventDispatcher.EventSubscriptionSet() - set.addSubscription(EventType.REQUEST_END, eventListener1) - set.addSubscription(EventType.REQUEST_END, eventListener2) - dispatcher.subscribeEvents(set) - - when: - dispatcher.publishEvent(ctx, EventType.REQUEST_END) - - then: - 1 * eventListener2.onEvent(ctx, EventType.REQUEST_END) - - then: - 1 * eventListener1.onEvent(ctx, EventType.REQUEST_END) - } - void 'notifies about data in order with the same flow'() { Flow savedFlow1, savedFlow2, savedFlow3 @@ -163,22 +138,16 @@ class EventDispatcherSpecification extends DDSpecification { } void 'saves the subscribed to events and addresses'() { - when: - EventListener eventListener = Mock() - eventListener.priority >> OrderedCallback.Priority.DEFAULT - def set = new EventDispatcher.EventSubscriptionSet() - set.addSubscription(EventType.REQUEST_END, eventListener) - + given: + def addressSet = new 
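For the capability masks updated above, the new values differ from the old ones by a single bit: 1980 is 956 with bit 10 set, and 1982 is 958 with the same bit set (958 itself being 956 with bit 1 set). The semantics of each bit are defined elsewhere in the agent and are not assumed here; the arithmetic itself can be checked directly:

// Bit arithmetic behind the updated capability masks in this spec.
long previousAdd    = 956L        // old addCapabilities argument
long previousRemove = 958L        // old removeCapabilities argument
long newBit         = 1L << 10    // the single bit introduced by this change

assert (previousAdd | newBit)    == 1980L
assert (previousRemove | newBit) == 1982L
assert previousRemove == (previousAdd | 2L)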
EventDispatcher.DataSubscriptionSet() DataListener dataListener = Mock() dataListener.priority >> OrderedCallback.Priority.DEFAULT - dispatcher.subscribeEvents(set) - def addressSet = new EventDispatcher.DataSubscriptionSet() + + when: addressSet.addSubscription([KnownAddresses.REQUEST_CLIENT_IP], dataListener) - dispatcher.subscribeDataAvailable(addressSet) then: - expect dispatcher.allSubscribedDataAddresses(), containsInAnyOrder(KnownAddresses.REQUEST_CLIENT_IP) - expect dispatcher.allSubscribedEvents(), containsInAnyOrder(EventType.REQUEST_END) + dispatcher.subscribeDataAvailable(addressSet) } void 'throws ExpiredSubscriberInfo if it is from a different EventDispatcher'() { diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/data/KnownAddressesSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/data/KnownAddressesSpecification.groovy index 0b0cfdeaf48..040297f1f57 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/data/KnownAddressesSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/event/data/KnownAddressesSpecification.groovy @@ -31,12 +31,13 @@ class KnownAddressesSpecification extends Specification { 'grpc.server.request.message', 'grpc.server.request.metadata', 'usr.id', + 'waf.context.processor', ] } void 'number of known addresses is expected number'() { expect: - Address.instanceCount() == 22 - KnownAddresses.USER_ID.serial == Address.instanceCount() - 1 + Address.instanceCount() == 24 + KnownAddresses.WAF_CONTEXT_PROCESSOR.serial == Address.instanceCount() - 1 } } diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/AppSecRequestContextSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/AppSecRequestContextSpecification.groovy index 2ad0e9dcd1f..4e7ce0b9d85 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/AppSecRequestContextSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/AppSecRequestContextSpecification.groovy @@ -4,7 +4,7 @@ package com.datadog.appsec.gateway import com.datadog.appsec.config.CurrentAppSecConfig import com.datadog.appsec.event.data.KnownAddresses import com.datadog.appsec.event.data.MapDataBundle -import com.datadog.appsec.report.raw.events.AppSecEvent100 +import com.datadog.appsec.report.AppSecEvent import com.datadog.appsec.test.StubAppSecConfigService import datadog.trace.test.util.DDSpecification import io.sqreen.powerwaf.Additive @@ -98,7 +98,7 @@ class AppSecRequestContextSpecification extends DDSpecification { void 'can collect events'() { when: - ctx.reportEvents([new AppSecEvent100(), new AppSecEvent100()], null) + ctx.reportEvents([new AppSecEvent(), new AppSecEvent()]) def events = ctx.transferCollectedEvents() then: @@ -107,7 +107,7 @@ class AppSecRequestContextSpecification extends DDSpecification { events[1] != null when: - ctx.reportEvents([new AppSecEvent100()], null) + ctx.reportEvents([new AppSecEvent()]) then: thrown IllegalStateException @@ -183,6 +183,6 @@ class AppSecRequestContextSpecification extends DDSpecification { then: ctx.additive == null - additive.online == false + !additive.online } } diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeIGRegistrationSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeIGRegistrationSpecification.groovy index 1f8213b4ab0..7457fd23e98 100644 --- 
a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeIGRegistrationSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeIGRegistrationSpecification.groovy @@ -1,7 +1,6 @@ package com.datadog.appsec.gateway import com.datadog.appsec.event.EventDispatcher -import com.datadog.appsec.event.EventType import com.datadog.appsec.event.data.KnownAddresses import datadog.trace.api.gateway.Events import datadog.trace.api.gateway.SubscriptionService @@ -11,11 +10,10 @@ class GatewayBridgeIGRegistrationSpecification extends DDSpecification { SubscriptionService ig = Mock() EventDispatcher eventDispatcher = Mock() - GatewayBridge bridge = new GatewayBridge(ig, eventDispatcher, null, []) + GatewayBridge bridge = new GatewayBridge(ig, eventDispatcher, null, null, []) void 'request_body_start and request_body_done are registered'() { given: - 1 * eventDispatcher.allSubscribedEvents() >> [EventType.REQUEST_BODY_START, EventType.REQUEST_BODY_END] 1 * eventDispatcher.allSubscribedDataAddresses() >> [] when: @@ -28,7 +26,6 @@ class GatewayBridgeIGRegistrationSpecification extends DDSpecification { void 'request_body_done is is registered via data address'() { given: - 1 * eventDispatcher.allSubscribedEvents() >> [] 1 * eventDispatcher.allSubscribedDataAddresses() >> [KnownAddresses.REQUEST_BODY_RAW] when: @@ -37,28 +34,4 @@ class GatewayBridgeIGRegistrationSpecification extends DDSpecification { then: 1 * ig.registerCallback(Events.REQUEST_BODY_DONE, _) } - - void 'request_body_start is not registered'() { - given: - 1 * eventDispatcher.allSubscribedEvents() >> [EventType.REQUEST_BODY_END] - 1 * eventDispatcher.allSubscribedDataAddresses() >> [] - - when: - bridge.init() - - then: - 0 * ig.registerCallback(Events.REQUEST_BODY_START, _) - } - - void 'request_body_end is not registered'() { - given: - 1 * eventDispatcher.allSubscribedEvents() >> [EventType.REQUEST_BODY_START] - 1 * eventDispatcher.allSubscribedDataAddresses() >> [] - - when: - bridge.init() - - then: - 0 * ig.registerCallback(Events.REQUEST_BODY_DONE, _) - } } diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeSpecification.groovy index 8a0e58180fd..f1d3d7ee241 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/gateway/GatewayBridgeSpecification.groovy @@ -4,12 +4,11 @@ import com.datadog.appsec.AppSecSystem import com.datadog.appsec.config.TraceSegmentPostProcessor import com.datadog.appsec.event.EventDispatcher import com.datadog.appsec.event.EventProducerService -import com.datadog.appsec.event.EventType import com.datadog.appsec.event.data.DataBundle import com.datadog.appsec.event.data.KnownAddresses import com.datadog.appsec.event.data.SingletonDataBundle +import com.datadog.appsec.report.AppSecEvent import com.datadog.appsec.report.AppSecEventWrapper -import com.datadog.appsec.report.raw.events.AppSecEvent100 import datadog.trace.api.internal.TraceSegment import datadog.trace.api.function.TriConsumer import datadog.trace.api.function.TriFunction @@ -62,7 +61,7 @@ class GatewayBridgeSpecification extends DDSpecification { RateLimiter rateLimiter = new RateLimiter(10, { -> 0L } as TimeSource, RateLimiter.ThrottledCallback.NOOP) TraceSegmentPostProcessor pp = Mock() - GatewayBridge bridge = new 
GatewayBridge(ig, eventDispatcher, rateLimiter, [pp]) + GatewayBridge bridge = new GatewayBridge(ig, eventDispatcher, rateLimiter, null, [pp]) Supplier> requestStartedCB BiFunction> requestEndedCB @@ -94,7 +93,6 @@ class GatewayBridgeSpecification extends DDSpecification { Flow startFlow = requestStartedCB.get() then: - 1 * eventDispatcher.publishEvent(_ as AppSecRequestContext, EventType.REQUEST_START) Object producedCtx = startFlow.getResult() producedCtx instanceof AppSecRequestContext startFlow.action == Flow.Action.Noop.INSTANCE @@ -117,7 +115,7 @@ class GatewayBridgeSpecification extends DDSpecification { } void 'request_end closes context reports attacks and publishes event'() { - AppSecEvent100 event = Mock() + AppSecEvent event = Mock() AppSecRequestContext mockAppSecCtx = Mock(AppSecRequestContext) mockAppSecCtx.requestHeaders >> ['accept':['header_value']] mockAppSecCtx.responseHeaders >> [ @@ -145,13 +143,13 @@ class GatewayBridgeSpecification extends DDSpecification { 1 * traceSegment.setTagTop('http.request.headers.accept', 'header_value') 1 * traceSegment.setTagTop('http.response.headers.content-type', 'text/html; charset=UTF-8') 1 * traceSegment.setTagTop('network.client.ip', '2001::1') - 1 * eventDispatcher.publishEvent(mockAppSecCtx, EventType.REQUEST_END) + 1 * mockAppSecCtx.closeAdditive() flow.result == null flow.action == Flow.Action.Noop.INSTANCE } void 'event publishing is rate limited'() { - AppSecEvent100 event = Mock() + AppSecEvent event = Mock() AppSecRequestContext mockAppSecCtx = Mock(AppSecRequestContext) mockAppSecCtx.requestHeaders >> [:] RequestContext mockCtx = Mock(RequestContext) { @@ -166,7 +164,7 @@ class GatewayBridgeSpecification extends DDSpecification { then: 11 * mockAppSecCtx.transferCollectedEvents() >> [event] 11 * mockAppSecCtx.close() - 11 * eventDispatcher.publishEvent(mockAppSecCtx, EventType.REQUEST_END) + 11 * mockAppSecCtx.closeAdditive() 10 * spanInfo.getTags() >> ['http.client_ip':'1.1.1.1'] 10 * traceSegment.setDataTop("appsec", _) } @@ -187,7 +185,7 @@ class GatewayBridgeSpecification extends DDSpecification { requestEndedCB.apply(mockCtx, spanInfo) then: - 1 * mockAppSecCtx.transferCollectedEvents() >> [Mock(AppSecEvent100)] + 1 * mockAppSecCtx.transferCollectedEvents() >> [Mock(AppSecEvent)] 1 * spanInfo.getTags() >> ['http.client_ip':'8.8.8.8'] 1 * traceSegment.setTagTop('actor.ip', '8.8.8.8') } @@ -245,7 +243,8 @@ class GatewayBridgeSpecification extends DDSpecification { then: thrown(IllegalStateException) - assert bundle.get(KnownAddresses.HEADERS_NO_COOKIES).isEmpty() + def data = bundle.get(KnownAddresses.HEADERS_NO_COOKIES) + assert data == null || data.isEmpty() } void 'the socket address is distributed'() { @@ -393,7 +392,6 @@ class GatewayBridgeSpecification extends DDSpecification { void callInitAndCaptureCBs() { // force all callbacks to be registered - _ * eventDispatcher.allSubscribedEvents() >> [EventType.REQUEST_BODY_START, EventType.REQUEST_BODY_END] _ * eventDispatcher.allSubscribedDataAddresses() >> [KnownAddresses.REQUEST_PATH_PARAMS, KnownAddresses.REQUEST_BODY_OBJECT] 1 * ig.registerCallback(EVENTS.requestStarted(), _) >> { requestStartedCB = it[1]; null } @@ -531,7 +529,6 @@ class GatewayBridgeSpecification extends DDSpecification { requestBodyStartCB.apply(ctx, supplier) then: - 1 * eventDispatcher.publishEvent(ctx.data, EventType.REQUEST_BODY_START) ctx.data.storedRequestBody == 'foobar' } @@ -549,7 +546,6 @@ class GatewayBridgeSpecification extends DDSpecification { requestBodyDoneCB.apply(ctx, supplier) 
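The rate-limiting expectation above (11 request-ended callbacks with the time source pinned at 0L, but only 10 events forwarded to the trace segment) corresponds to a fixed-window counter: allow up to the limit per second of the supplied clock and drop the rest. A minimal Groovy sketch of that behaviour, using assumed names rather than the agent's RateLimiter API:

// Illustrative fixed-window limiter: at most `limit` permits per second of the supplied clock.
class WindowedLimiter {
  private final int limit
  private final Closure<Long> nanoClock
  private long windowStart = Long.MIN_VALUE
  private int used = 0

  WindowedLimiter(int limit, Closure<Long> nanoClock) {
    this.limit = limit
    this.nanoClock = nanoClock
  }

  boolean tryAcquire() {
    long now = nanoClock.call()
    if (windowStart == Long.MIN_VALUE || now - windowStart >= 1000000000L) {
      windowStart = now   // start a new one-second window
      used = 0
    }
    return used++ < limit
  }
}

def limiter = new WindowedLimiter(10, { -> 0L })
assert (1..11).count { limiter.tryAcquire() } == 10   // the 11th attempt in the same window is dropped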
then: - 1 * eventDispatcher.publishEvent(ctx.data, EventType.REQUEST_BODY_END) bundle.get(KnownAddresses.REQUEST_BODY_RAW) == 'foobar' } @@ -567,7 +563,6 @@ class GatewayBridgeSpecification extends DDSpecification { then: flow == NoopFlow.INSTANCE 0 * eventDispatcher.getDataSubscribers(KnownAddresses.REQUEST_BODY_RAW) - 0 * eventDispatcher.publishEvent(ctx.data, EventType.REQUEST_BODY_END) } void 'forward request body processed'() { diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/powerwaf/PowerWAFModuleSpecification.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/powerwaf/PowerWAFModuleSpecification.groovy index 2d27e08397d..44611653cc3 100644 --- a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/powerwaf/PowerWAFModuleSpecification.groovy +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/powerwaf/PowerWAFModuleSpecification.groovy @@ -8,16 +8,14 @@ import com.datadog.appsec.config.CurrentAppSecConfig import com.datadog.appsec.config.TraceSegmentPostProcessor import com.datadog.appsec.event.ChangeableFlow import com.datadog.appsec.event.DataListener -import com.datadog.appsec.event.EventListener -import com.datadog.appsec.event.EventType +import com.datadog.appsec.event.data.Address import com.datadog.appsec.event.data.CaseInsensitiveMap import com.datadog.appsec.event.data.DataBundle import com.datadog.appsec.event.data.KnownAddresses import com.datadog.appsec.event.data.MapDataBundle import com.datadog.appsec.gateway.AppSecRequestContext -import com.datadog.appsec.report.raw.events.AppSecEvent100 -import com.datadog.appsec.report.raw.events.Parameter -import com.datadog.appsec.report.raw.events.Tags +import com.datadog.appsec.report.AppSecEvent +import com.datadog.appsec.report.Parameter import com.datadog.appsec.test.StubAppSecConfigService import datadog.trace.api.ConfigDefaults import datadog.trace.api.internal.TraceSegment @@ -45,7 +43,6 @@ class PowerWAFModuleSpecification extends DDSpecification { StubAppSecConfigService service PowerWAFModule pwafModule = new PowerWAFModule() DataListener dataListener - EventListener eventListener Additive pwafAdditive PowerwafMetrics metrics @@ -64,7 +61,6 @@ class PowerWAFModuleSpecification extends DDSpecification { service.init() pwafModule.config(service) dataListener = pwafModule.dataSubscriptions.first() - eventListener = pwafModule.eventSubscriptions.first() } void 'use default actions if none defined in config'() { @@ -134,7 +130,6 @@ class PowerWAFModuleSpecification extends DDSpecification { when: setupWithStubConfigService('override_actions_config.json') dataListener = pwafModule.dataSubscriptions.first() - eventListener = pwafModule.eventSubscriptions.first() def actions = [ [ @@ -179,7 +174,7 @@ class PowerWAFModuleSpecification extends DDSpecification { '1.2.3.4' ) dataListener.onDataAvailable(flow, ctx, newBundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -190,7 +185,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 1 * ctx.getOrCreateAdditive(_ as PowerwafContext, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() 1 * flow.isBlocking() @@ -204,14 +199,14 @@ class PowerWAFModuleSpecification extends DDSpecification { when: setupWithStubConfigService('rules_with_data_config.json') dataListener = pwafModule.dataSubscriptions.first() - eventListener = 
pwafModule.eventSubscriptions.first() + ctx.closeAdditive() def bundle = MapDataBundle.of( KnownAddresses.USER_ID, 'user-to-block-1' ) dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * flow.setAction({ Flow.Action.RequestBlockingAction rba -> @@ -221,9 +216,9 @@ class PowerWAFModuleSpecification extends DDSpecification { 1 * ctx.getOrCreateAdditive(_ as PowerwafContext, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() - 1 * ctx.closeAdditive() + 2 * ctx.closeAdditive() 1 * flow.isBlocking() 0 * _ @@ -232,10 +227,12 @@ class PowerWAFModuleSpecification extends DDSpecification { [ id : 'blocked_users', type: 'data_with_expiration', - data: [[ + data: [ + [ value : 'user-to-block-2', expiration: '0', - ]] + ] + ] ] ] service.currentAppSecConfig.with { @@ -247,7 +244,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -258,7 +255,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 1 * ctx.getOrCreateAdditive(_ as PowerwafContext, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() 1 * flow.isBlocking() @@ -270,7 +267,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 'user-to-block-2' ) dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * flow.setAction({ Flow.Action.RequestBlockingAction rba -> @@ -280,7 +277,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 1 * ctx.getOrCreateAdditive(_ as PowerwafContext, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() 1 * flow.isBlocking() @@ -323,7 +320,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 'user-to-block-2' ) dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -334,7 +331,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 1 * ctx.getOrCreateAdditive(_ as PowerwafContext, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() 1 * flow.isBlocking() @@ -346,7 +343,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 'user-to-block-1' ) dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_ as PowerwafContext, true) >> { @@ -366,7 +363,7 @@ class PowerWAFModuleSpecification extends DDSpecification { def exclusions = [ [ - id : 1, + id : '1', rules_target: [ [ tags: [ @@ -402,13 +399,13 @@ class PowerWAFModuleSpecification extends DDSpecification { when: dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as 
Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() >> { pwafAdditive.close() } 0 * _ @@ -421,7 +418,7 @@ class PowerWAFModuleSpecification extends DDSpecification { '192.168.0.1' ) dataListener.onDataAvailable(flow, ctx, newBundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, true) >> { @@ -479,14 +476,14 @@ class PowerWAFModuleSpecification extends DDSpecification { when: dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } // we get two events: one for origin rule, and one for the custom one - 1 * ctx.reportEvents(hasSize(2), _) + 1 * ctx.reportEvents(hasSize(2)) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() 0 * _ @@ -550,7 +547,7 @@ class PowerWAFModuleSpecification extends DDSpecification { when: dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: // original rule is replaced; no attack @@ -559,7 +556,7 @@ class PowerWAFModuleSpecification extends DDSpecification { rba.blockingContentType == BlockingContentType.AUTO }) 1 * ctx.getOrCreateAdditive(_, true) >> { it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * ctx.closeAdditive() 1 * flow.isBlocking() @@ -573,7 +570,7 @@ class PowerWAFModuleSpecification extends DDSpecification { new CaseInsensitiveMap>(['user-agent': 'redirect' + variant])) def flow = new ChangeableFlow() dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, true) >> { @@ -584,7 +581,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } 1 * ctx.getWafMetrics() >> metrics 1 * ctx.closeAdditive() - 1 * ctx.reportEvents(_, _) + 1 * ctx.reportEvents(_) 0 * ctx._(*_) flow.blocking == true flow.action instanceof Flow.Action.RequestBlockingAction @@ -636,7 +633,7 @@ class PowerWAFModuleSpecification extends DDSpecification { when: dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, true) >> { @@ -647,7 +644,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } 1 * ctx.getWafMetrics() >> metrics 1 * ctx.closeAdditive() - 1 * ctx.reportEvents(_, _) + 1 * ctx.reportEvents(_) 0 * ctx._(*_) flow.blocking == true flow.action.statusCode == 418 @@ -663,7 +660,7 @@ class PowerWAFModuleSpecification extends DDSpecification { when: dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, false) >> { @@ -671,7 +668,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } 1 * ctx.getWafMetrics() >> null 1 * ctx.closeAdditive() - 1 * ctx.reportEvents(_, _) + 1 * ctx.reportEvents(_) 0 * ctx._(*_) metrics == null } @@ -686,7 +683,7 @@ class PowerWAFModuleSpecification extends DDSpecification { setupWithStubConfigService() pp = service.traceSegmentPostProcessors[1] dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() pp.processTraceSegment(segment, ctx, []) then: @@ 
-728,29 +725,26 @@ class PowerWAFModuleSpecification extends DDSpecification { void 'reports events'() { setupWithStubConfigService() - AppSecEvent100 event + AppSecEvent event when: dataListener.onDataAvailable(Mock(ChangeableFlow), ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - ctx.reportEvents(_ as Collection, _) >> { event = it[0].iterator().next() } + ctx.reportEvents(_ as Collection) >> { event = it[0].iterator().next() } event.rule.id == 'ua0-600-12x' event.rule.name == 'Arachni' - event.rule.tags == new Tags.TagsBuilder() - .withType('security_scanner') - .withCategory('attack_attempt') - .build() + event.rule.tags == [type: 'security_scanner', category: 'attack_attempt'] event.ruleMatches[0].operator == 'match_regex' event.ruleMatches[0].operatorValue == '^Arachni\\/v' event.ruleMatches[0].parameters == [ - new Parameter.ParameterBuilder() + new Parameter.Builder() .withAddress('server.request.headers.no_cookies') .withKeyPath(['user-agent']) .withValue('Arachni/v0') @@ -761,22 +755,22 @@ class PowerWAFModuleSpecification extends DDSpecification { void 'redaction with default settings'() { setupWithStubConfigService() - AppSecEvent100 event + AppSecEvent event when: def bundle = MapDataBundle.of(KnownAddresses.HEADERS_NO_COOKIES, new CaseInsensitiveMap>(['user-agent': [password: 'Arachni/v0']])) dataListener.onDataAvailable(Mock(ChangeableFlow), ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - ctx.reportEvents(_ as Collection, _) >> { event = it[0].iterator().next() } + ctx.reportEvents(_ as Collection) >> { event = it[0].iterator().next() } event.ruleMatches[0].parameters == [ - new Parameter.ParameterBuilder() + new Parameter.Builder() .withAddress('server.request.headers.no_cookies') .withKeyPath(['user-agent', 'password']) .withValue('') @@ -788,22 +782,22 @@ class PowerWAFModuleSpecification extends DDSpecification { void 'disabling of key regex'() { injectSysConfig(APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP, '') setupWithStubConfigService() - AppSecEvent100 event + AppSecEvent event when: def bundle = MapDataBundle.of(KnownAddresses.HEADERS_NO_COOKIES, new CaseInsensitiveMap>(['user-agent': [password: 'Arachni/v0']])) dataListener.onDataAvailable(Mock(ChangeableFlow), ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - ctx.reportEvents(_ as Collection, _) >> { event = it[0].iterator().next() } + ctx.reportEvents(_ as Collection) >> { event = it[0].iterator().next() } event.ruleMatches[0].parameters == [ - new Parameter.ParameterBuilder() + new Parameter.Builder() .withAddress('server.request.headers.no_cookies') .withKeyPath(['user-agent', 'password']) .withValue('Arachni/v0') @@ -816,20 +810,20 @@ class PowerWAFModuleSpecification extends DDSpecification { injectSysConfig(APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP, 'Arachni') setupWithStubConfigService() - AppSecEvent100 event + AppSecEvent event when: dataListener.onDataAvailable(Mock(ChangeableFlow), ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - ctx.reportEvents(_ as Collection, _) >> { event = 
it[0].iterator().next() } + ctx.reportEvents(_ as Collection) >> { event = it[0].iterator().next() } event.ruleMatches[0].parameters == [ - new Parameter.ParameterBuilder() + new Parameter.Builder() .withAddress('server.request.headers.no_cookies') .withKeyPath(['user-agent']) .withValue('') @@ -890,12 +884,6 @@ class PowerWAFModuleSpecification extends DDSpecification { PowerWAFModule.createLimitsObject() } - void 'subscribes 1 event'() { - expect: - pwafModule.eventSubscriptions.isEmpty() == false - pwafModule.eventSubscriptions.first().eventType == EventType.REQUEST_END - } - void 'configuration can be given later'() { def cfgService = new StubAppSecConfigService([waf: null]) AppSecModuleConfigurer.Reconfiguration reconf = Mock() @@ -910,14 +898,13 @@ class PowerWAFModuleSpecification extends DDSpecification { when: cfgService.listeners['waf'].onNewSubconfig(defaultConfig['waf'], reconf) dataListener = pwafModule.dataSubscriptions.first() - eventListener = pwafModule.eventSubscriptions.first() dataListener.onDataAvailable(Mock(ChangeableFlow), ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * reconf.reloadSubscriptions() } @@ -945,15 +932,14 @@ class PowerWAFModuleSpecification extends DDSpecification { } dataListener = pwafModule.dataSubscriptions.first() - eventListener = pwafModule.eventSubscriptions.first() def bundle = MapDataBundle.of(KnownAddresses.REQUEST_INFERRED_CLIENT_IP, '1.2.3.4') dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() 1 * ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * flow.setAction({ it.blocking }) 1 * ctx.closeAdditive() @@ -1000,10 +986,9 @@ class PowerWAFModuleSpecification extends DDSpecification { } dataListener = pwafModule.dataSubscriptions.first() - eventListener = pwafModule.eventSubscriptions.first() def bundle = MapDataBundle.of(KnownAddresses.REQUEST_INFERRED_CLIENT_IP, '1.2.3.4') dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 'no match; rule is disabled' 1 * reconf.reloadSubscriptions() @@ -1025,7 +1010,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 'no match; data was cleared (though rule is no longer disabled)' 1 * ctx.getOrCreateAdditive(_, true) >> { @@ -1044,13 +1029,13 @@ class PowerWAFModuleSpecification extends DDSpecification { } dataListener.onDataAvailable(flow, ctx, bundle, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 'now we have match' 1 * reconf.reloadSubscriptions() 1 * ctx.getOrCreateAdditive(_, true) >> { pwafAdditive = it[0].openAdditive() } - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.getWafMetrics() 1 * flow.setAction({ it.blocking }) 1 * ctx.closeAdditive() >> {pwafAdditive.close()} @@ -1068,7 +1053,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } dataListener.onDataAvailable(flow, ctx, bundle, false) - 
eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 'nothing again; we disabled the rule' 1 * reconf.reloadSubscriptions() @@ -1094,9 +1079,8 @@ class PowerWAFModuleSpecification extends DDSpecification { it.dirtyStatus.clearDirty() } dataListener = pwafModule.dataSubscriptions.first() - eventListener = pwafModule.eventSubscriptions.first() dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -1117,7 +1101,7 @@ class PowerWAFModuleSpecification extends DDSpecification { it.dirtyStatus.clearDirty() } dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -1139,7 +1123,7 @@ class PowerWAFModuleSpecification extends DDSpecification { it.dirtyStatus.clearDirty() } dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -1149,7 +1133,7 @@ class PowerWAFModuleSpecification extends DDSpecification { 1 * ctx.getWafMetrics() 1 * flow.isBlocking() 1 * flow.setAction({ it.blocking }) - 1 * ctx.reportEvents(_ as Collection, _) + 1 * ctx.reportEvents(_ as Collection) 1 * ctx.closeAdditive() >> {pwafAdditive.close()} 0 * _ @@ -1162,7 +1146,7 @@ class PowerWAFModuleSpecification extends DDSpecification { it.dirtyStatus.clearDirty() } dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) - eventListener.onEvent(ctx, EventType.REQUEST_END) + ctx.closeAdditive() then: 1 * reconf.reloadSubscriptions() @@ -1175,6 +1159,7 @@ class PowerWAFModuleSpecification extends DDSpecification { } void 'initial configuration has unknown addresses'() { + Address doesNotExistAddress = new Address<>("server.request.headers.does-not-exist") def cfgService = new StubAppSecConfigService(waf: new CurrentAppSecConfig( ddConfig: AppSecConfig.valueOf([ @@ -1192,7 +1177,7 @@ class PowerWAFModuleSpecification extends DDSpecification { parameters: [ inputs: [ [ - address: 'server.request.headers.does-not-exist', + address: doesNotExistAddress.key, key_path: ['user-agent']] ], regex: '^Arachni\\/v' @@ -1209,7 +1194,7 @@ class PowerWAFModuleSpecification extends DDSpecification { pwafModule.config(cfgService) then: - pwafModule.dataSubscriptions.first().subscribedAddresses.empty + !pwafModule.dataSubscriptions.first().subscribedAddresses.contains(doesNotExistAddress) } void 'bad initial configuration is given results in no subscriptions'() { @@ -1240,7 +1225,7 @@ class PowerWAFModuleSpecification extends DDSpecification { void 'bad ResultWithData - empty list'() { def waf = new PowerWAFModule() - Powerwaf.ResultWithData rwd = new Powerwaf.ResultWithData(null, "[]") + Powerwaf.ResultWithData rwd = new Powerwaf.ResultWithData(null, "[]", null, null) Collection ret when: @@ -1252,7 +1237,7 @@ class PowerWAFModuleSpecification extends DDSpecification { void 'bad ResultWithData - empty object'() { def waf = new PowerWAFModule() - Powerwaf.ResultWithData rwd = new Powerwaf.ResultWithData(null, "[{}]") + Powerwaf.ResultWithData rwd = new Powerwaf.ResultWithData(null, "[{}]", null, null) Collection ret when: @@ -1278,7 +1263,7 @@ class PowerWAFModuleSpecification extends DDSpecification { dataListener.onDataAvailable(flow, ctx, ATTACK_BUNDLE, false) Thread thread = new Thread({ p -> latch.countDown() - eventListener.onEvent(ctx, 
EventType.REQUEST_END) + ctx.closeAdditive() }) thread.start() latch.await() diff --git a/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/report/AppSecEventWrapperTest.groovy b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/report/AppSecEventWrapperTest.groovy new file mode 100644 index 00000000000..7b45c1283c1 --- /dev/null +++ b/dd-java-agent/appsec/src/test/groovy/com/datadog/appsec/report/AppSecEventWrapperTest.groovy @@ -0,0 +1,45 @@ +package com.datadog.appsec.report + +import datadog.trace.test.util.DDSpecification + +class AppSecEventWrapperTest extends DDSpecification { + + void 'validate json serialization for AppSecEvent report'() { + setup: + def event = new AppSecEvent.Builder() + .withRule( + new Rule.Builder() + .withId('rule_id') + .withName('rule_name') + .withTags([tag: 'value']) + .build() + ) + .withRuleMatches( + [ + new RuleMatch.Builder() + .withOperator('rule_match_operator') + .withOperatorValue("rule_match_operator_value") + .withParameters([ + new Parameter.Builder() + .withAddress("parameter_address") + .withHighlight(['parameter_highlight']) + .withKeyPath(['parameter_key_path']) + .withValue('parameter_value') + .build() + ] + ) + .build() + ] + ) + .build() + + def expectedJson = '{"triggers":[{"rule":{"id":"rule_id","name":"rule_name","tags":{"tag":"value"}},"rule_matches":[{"operator":"rule_match_operator","operator_value":"rule_match_operator_value","parameters":[{"address":"parameter_address","highlight":["parameter_highlight"],"key_path":["parameter_key_path"],"value":"parameter_value"}]}]}]}' + + when: + def wrapper = new AppSecEventWrapper([event]) + def json = wrapper.toString() + + then: + json == expectedJson + } +} diff --git a/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecHttpServerTest.groovy b/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecHttpServerTest.groovy index b3d228aecd1..6182f9e0a4a 100644 --- a/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecHttpServerTest.groovy +++ b/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecHttpServerTest.groovy @@ -18,7 +18,7 @@ abstract class AppSecHttpServerTest extends WithHttpServer { } def setupSpec() { - SubscriptionService ss = AgentTracer.TracerAPI.get().getSubscriptionService(RequestContextSlot.APPSEC) + SubscriptionService ss = AgentTracer.get().getSubscriptionService(RequestContextSlot.APPSEC) def sco = new SharedCommunicationObjects() def config = Config.get() sco.createRemaining(config) diff --git a/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecInactiveHttpServerTest.groovy b/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecInactiveHttpServerTest.groovy index 4aa8e246f66..62fe251f33a 100644 --- a/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecInactiveHttpServerTest.groovy +++ b/dd-java-agent/appsec/src/testFixtures/groovy/com/datadog/appsec/AppSecInactiveHttpServerTest.groovy @@ -7,7 +7,7 @@ import datadog.trace.agent.test.base.WithHttpServer import datadog.trace.api.Config import datadog.trace.api.gateway.RequestContextSlot import datadog.trace.api.gateway.SubscriptionService -import datadog.trace.bootstrap.instrumentation.api.AgentTracer.TracerAPI +import datadog.trace.bootstrap.instrumentation.api.AgentTracer import datadog.trace.core.DDSpan import okhttp3.FormBody import okhttp3.HttpUrl @@ -31,7 +31,7 @@ abstract class AppSecInactiveHttpServerTest extends WithHttpServer { } void setupSpec() { - SubscriptionService ss = 
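Since the expected payload in the new AppSecEventWrapperTest is plain JSON, its shape can also be sanity-checked with nothing but groovy.json; the field names below are taken verbatim from the expected string in that test:

import groovy.json.JsonSlurper

def expected = new JsonSlurper().parseText(
  '{"triggers":[{"rule":{"id":"rule_id","name":"rule_name","tags":{"tag":"value"}},' +
  '"rule_matches":[{"operator":"rule_match_operator","operator_value":"rule_match_operator_value",' +
  '"parameters":[{"address":"parameter_address","highlight":["parameter_highlight"],' +
  '"key_path":["parameter_key_path"],"value":"parameter_value"}]}]}]}')

assert expected.triggers.size() == 1
assert expected.triggers[0].rule.id == 'rule_id'
assert expected.triggers[0].rule_matches[0].parameters[0].key_path == ['parameter_key_path']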
TracerAPI.get().getSubscriptionService(RequestContextSlot.APPSEC) + SubscriptionService ss = AgentTracer.get().getSubscriptionService(RequestContextSlot.APPSEC) def sco = new SharedCommunicationObjects() def config = Config.get() sco.createRemaining(config) diff --git a/dd-java-agent/build.gradle b/dd-java-agent/build.gradle index 9958a0d0671..0e31e9aa07f 100644 --- a/dd-java-agent/build.gradle +++ b/dd-java-agent/build.gradle @@ -14,23 +14,29 @@ apply from: "$rootDir/gradle/publish.gradle" configurations { shadowInclude sharedShadowInclude + traceShadowInclude } sourceCompatibility = JavaVersion.VERSION_1_7 targetCompatibility = JavaVersion.VERSION_1_7 /* - * 7 shadow jars are created + * Several shadow jars are created * - The main "dd-java-agent" jar that also has the bootstrap project - * - 5 jars based on projects (instrumentation, jmxfetch, profiling, appsec, iast) - * - 1 based on the shared dependencies + * - Major feature jars (trace, instrumentation, jmxfetch, profiling, appsec, iast, debugger, ci-visibility) + * - A shared dependencies jar * This general config is shared by all of them */ ext.generalShadowJarConfig = { mergeServiceFiles() - exclude '**/META-INF/maven/' + duplicatesStrategy = DuplicatesStrategy.FAIL + + // Remove some cruft from the final jar. + // These patterns should NOT include **/META-INF/maven/**/pom.properties, which is + // used to report our own dependencies. + exclude '**/META-INF/maven/**/pom.xml' exclude '**/META-INF/proguard/' exclude '**/META-INF/*.kotlin_module' exclude '**/module-info.class' @@ -42,6 +48,14 @@ ext.generalShadowJarConfig = { // rewrite dependencies calling Logger.getLogger relocate 'java.util.logging.Logger', 'datadog.trace.bootstrap.PatchLogger' + final String projectName = "${project.name}" + + // Prevents conflict with other OkHttp instances, but don't relocate instrumentation + if (!projectName.equals('instrumentation')) { + relocate 'okhttp3', 'datadog.okhttp3' + relocate 'okio', 'datadog.okio' + } + if (!project.hasProperty("disableShadowRelocate") || !disableShadowRelocate) { // shadow OT impl to prevent casts to implementation relocate 'datadog.trace.common', 'datadog.trace.agent.common' @@ -119,6 +133,15 @@ def sharedShadowJar = tasks.register('sharedShadowJar', ShadowJar) { } includeShadowJar(sharedShadowJar, 'shared') +// place the tracer in its own shadow jar separate to instrumentation +def traceShadowJar = tasks.register('traceShadowJar', ShadowJar) { + configurations = [project.configurations.traceShadowInclude] + it.destinationDirectory.set(file("${project.buildDir}/trace-lib")) + archiveClassifier = 'trace' + it.dependencies deps.excludeShared +} +includeShadowJar(traceShadowJar, 'trace') + shadowJar generalShadowJarConfig >> { configurations = [project.configurations.shadowInclude] @@ -169,6 +192,12 @@ tasks.withType(GenerateMavenPom).configureEach { task -> } dependencies { + modules { + module("com.squareup.okio:okio") { + replacedBy("com.datadoghq.okio:okio") // embed our patched fork + } + } + testImplementation(project(':dd-java-agent:agent-bootstrap')) { exclude group: 'com.datadoghq', module: 'agent-logging' } @@ -200,6 +229,7 @@ dependencies { // do not bring along slf4j and dependent subprojects // (which are loaded on the bootstrap cl) } + sharedShadowInclude deps.cafe_crypto sharedShadowInclude project(':remote-config'), { transitive = false } @@ -212,6 +242,10 @@ dependencies { sharedShadowInclude project(':utils:version-utils'), { transitive = false } + sharedShadowInclude 
project(':dd-java-agent:agent-crashtracking'), { + transitive = false + } + traceShadowInclude project(':dd-trace-core') } tasks.withType(Test).configureEach { diff --git a/dd-java-agent/instrumentation/aerospike-4/src/main/java/datadog/trace/instrumentation/aerospike4/AerospikeClientDecorator.java b/dd-java-agent/instrumentation/aerospike-4/src/main/java/datadog/trace/instrumentation/aerospike4/AerospikeClientDecorator.java index dce73416df0..96f4f8e031d 100644 --- a/dd-java-agent/instrumentation/aerospike-4/src/main/java/datadog/trace/instrumentation/aerospike4/AerospikeClientDecorator.java +++ b/dd-java-agent/instrumentation/aerospike-4/src/main/java/datadog/trace/instrumentation/aerospike4/AerospikeClientDecorator.java @@ -17,10 +17,7 @@ public class AerospikeClientDecorator extends DBTypeProcessingDatabaseClientDecorator { private static final String DB_TYPE = "aerospike"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final UTF8BytesString JAVA_AEROSPIKE = UTF8BytesString.create("java-aerospike"); public static final UTF8BytesString OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); diff --git a/dd-java-agent/instrumentation/akka-http-10.0/build.gradle b/dd-java-agent/instrumentation/akka-http-10.0/build.gradle index 33a89187a72..a63f2c4c764 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/build.gradle +++ b/dd-java-agent/instrumentation/akka-http-10.0/build.gradle @@ -11,7 +11,10 @@ apply plugin: 'call-site-instrumentation' // we put the test classes in the baseTest test set so that the scala // version is not inherited addTestSuite('baseTest') +addTestSuiteExtendingForDir('baseForkedTest', 'baseTest', 'baseTest') addTestSuiteForDir('version101Test', 'baseTest') +addTestSuiteExtendingForDir('version101ForkedTest', 'version101Test', 'baseTest') +addTestSuiteForDir('version102Scala213Test', 'latestDepTest') addTestSuite('latestDepTest') addTestSuite('lagomTest') addTestSuite('iastTest') @@ -87,6 +90,9 @@ artifacts { } sourceSets { + version102Scala213Test.groovy.srcDir sourceSets.baseTest.groovy + version102Scala213Test.scala.srcDir sourceSets.baseTest.scala + latestDepTest.groovy.srcDir sourceSets.baseTest.groovy latestDepTest.scala.srcDir sourceSets.baseTest.scala } @@ -107,8 +113,9 @@ dependencies { // First 10.0.x version with a convenient way to test http2 support baseTestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.11', version: '10.0.10' baseTestImplementation group: 'com.typesafe.akka', name: 'akka-http2-support_2.11', version: '10.0.10' + baseTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.11', version: '10.0.10' + baseTestImplementation group: 'com.typesafe.akka', name: 'akka-http-spray-json_2.11', version: '10.0.10' - iastTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.11', version: '10.0.10' iastTestImplementation(testFixtures(project(':dd-java-agent:agent-iast'))) iastTestCompileOnly group: 'de.thetaphi', name: 'forbiddenapis', version: '3.4' iastTestRuntimeOnly project(':dd-java-agent:instrumentation:jackson-core') @@ -120,8 +127,9 @@ dependencies { version101TestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.12', version: '10.1.+' version101TestImplementation group: 'com.typesafe.akka', name: 'akka-http2-support_2.12', version: '10.1.+' 
version101TestImplementation group: 'com.typesafe.akka', name: 'akka-stream_2.12', version: '2.5.+' + version101TestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.12', version: '10.1.+' + version101TestImplementation group: 'com.typesafe.akka', name: 'akka-http-spray-json_2.12', version: '10.1.+' - version101IastTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.12', version: '10.1.+' version101IastTestImplementation(testFixtures(project(':dd-java-agent:agent-iast'))) version102IastTestImplementation deps.scala212 @@ -130,17 +138,28 @@ dependencies { version102IastTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.12', version: '10.2.+' version102IastTestImplementation(testFixtures(project(':dd-java-agent:agent-iast'))) + version102Scala213TestImplementation deps.scala213 + version102Scala213TestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.13', version: '10.2.+' + version102Scala213TestImplementation group: 'com.typesafe.akka', name: 'akka-stream_2.13', version: '2.6.+' + version102Scala213TestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.13', version: '10.2.+' + version102Scala213TestImplementation group: 'com.typesafe.akka', name: 'akka-http-spray-json_2.13', version: '10.2.+' + latestDepTestImplementation deps.scala213 - latestDepTestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.13', version: '10.2.+' + latestDepTestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.13', version: '10.5.+' // http2 support is included in akka-http since 10.2.x - latestDepTestImplementation group: 'com.typesafe.akka', name: 'akka-stream_2.13', version: '2.6.+' + latestDepTestImplementation group: 'com.typesafe.akka', name: 'akka-stream_2.13', version: '2.7.0' + latestDepTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.13', version: '10.5.+' + latestDepTestImplementation group: 'com.typesafe.akka', name: 'akka-http-spray-json_2.13', version: '10.5.+' // TODO: test with Scala 3 latestDepIastTestImplementation deps.scala213 - latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.13', version: '10.+' - latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-stream_2.13', version: '2.+' - latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-actor_2.13', version: '2.+' - latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.13', version: '10.+' + // Use akka-2.8.+ since latest akka-http release is still not compatible with akka-2.9.0-M1+. 
+ // akka-http versions are limited to 10.5.2 as this is the final version with Java 8 support + // See: https://github.com/akka/akka-http/releases + latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-http_2.13', version: '[10.+,10.5.2)' + latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-stream_2.13', version: '2.8.+' + latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-actor_2.13', version: '2.8.+' + latestDepIastTestImplementation group: 'com.typesafe.akka', name: 'akka-http-jackson_2.13', version: '[10.+,10.5.2)' latestDepIastTestImplementation(testFixtures(project(':dd-java-agent:agent-iast'))) lagomTestImplementation deps.scala211 @@ -149,20 +168,12 @@ dependencies { lagomTestImplementation group: 'com.lightbend.lagom', name: 'lagom-javadsl-testkit_2.11', version: '1.4.0' } -tasks.named("test").configure { - dependsOn "baseTest" - dependsOn "version101Test" - dependsOn "lagomTest" - dependsOn "iastTest" - dependsOn "version101IastTest" - dependsOn "version102IastTest" -} - -tasks.named('latestDepTest').configure { - dependsOn "latestDepIastTest" +compileBaseTestGroovy { + classpath = classpath.plus(files(compileBaseTestScala.destinationDirectory)) + dependsOn "compileBaseTestScala" } -compileBaseTestGroovy { +compileBaseForkedTestGroovy { classpath = classpath.plus(files(compileBaseTestScala.destinationDirectory)) dependsOn "compileBaseTestScala" } @@ -172,6 +183,16 @@ compileVersion101TestGroovy { dependsOn "compileVersion101TestScala" } +compileVersion101ForkedTestGroovy { + classpath = classpath.plus(files(compileVersion101TestScala.destinationDirectory)) + dependsOn "compileVersion101TestScala" +} + +compileVersion102Scala213TestGroovy { + classpath = classpath.plus(files(compileVersion102Scala213TestScala.destinationDirectory)) + dependsOn "compileVersion102Scala213TestScala" +} + compileLatestDepTestGroovy { classpath = classpath.plus(files(compileLatestDepTestScala.destinationDirectory)) dependsOn "compileLatestDepTestScala" diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpClientInstrumentationTest.groovy b/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpClientInstrumentationTest.groovy index 6bb82efb06b..6ae3c72deaf 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpClientInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpClientInstrumentationTest.groovy @@ -88,21 +88,18 @@ abstract class AkkaHttpClientInstrumentationTest extends HttpClientTest { span { parent() operationName operation() - resourceName "akka-http.client.request" + resourceName operation() // resource name is not set so defaults to operationName spanType DDSpanTypes.HTTP_CLIENT errored true tags { "$Tags.COMPONENT" "akka-http-client" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT errorTags(exception) - defaultTags() + defaultTags(false, false) } } } } - - where: - renameService << [false, true] } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpServerInstrumentationTest.groovy b/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpServerInstrumentationTest.groovy index 4b4160930e1..b49a9a2bfb7 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpServerInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/baseTest/groovy/AkkaHttpServerInstrumentationTest.groovy @@ -3,11 +3,21 @@ import 
datadog.trace.agent.test.base.HttpServerTest import datadog.trace.agent.test.naming.TestingGenericHttpNamingConventions import datadog.trace.agent.test.utils.ThreadUtils import datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator +import okhttp3.HttpUrl +import okhttp3.MultipartBody import okhttp3.Request +import okhttp3.RequestBody +import okhttp3.Response import spock.lang.Shared import java.util.concurrent.atomic.AtomicInteger +import static datadog.trace.agent.test.base.HttpServerTest.IG_ASK_FOR_RESPONSE_HEADER_TAGS_HEADER +import static datadog.trace.agent.test.base.HttpServerTest.ServerEndpoint.BODY_JSON +import static datadog.trace.agent.test.base.HttpServerTest.ServerEndpoint.BODY_MULTIPART +import static datadog.trace.agent.test.base.HttpServerTest.ServerEndpoint.SUCCESS +import static org.junit.Assume.assumeTrue + abstract class AkkaHttpServerInstrumentationTest extends HttpServerTest { @Override @@ -26,13 +36,13 @@ abstract class AkkaHttpServerInstrumentationTest extends HttpServerTest ActorSystem(name) case Some(config) => ActorSystem(name, config) } } - implicit val materializer = ActorMaterializer() + implicit val materializer: ActorMaterializer = ActorMaterializer() private var port: Int = 0 - private var portBinding: Future[ServerBinding] = null + private var portBinding: Future[ServerBinding] = _ override def start(): Unit = { portBinding = Await.ready(binder.bind(0), 10 seconds) @@ -61,8 +71,8 @@ object AkkaHttpTestWebServer { def config: Option[Config] = None def bind(port: Int)( - implicit system: ActorSystem, - materializer: Materializer + implicit system: ActorSystem, + materializer: Materializer ): Future[ServerBinding] } @@ -70,8 +80,8 @@ object AkkaHttpTestWebServer { override def name: String = "bind-and-handle" override def bind(port: Int)( - implicit system: ActorSystem, - materializer: Materializer + implicit system: ActorSystem, + materializer: Materializer ): Future[ServerBinding] = { import materializer.executionContext Http().bindAndHandle(route, "localhost", port) @@ -81,9 +91,15 @@ object AkkaHttpTestWebServer { val BindAndHandleAsyncWithRouteAsyncHandler: Binder = new Binder { override def name: String = "bind-and-handle-async-with-route-async-handler" + override def config: Option[Config] = Some( + ConfigFactory.load() + .withValue("akka.http.server.request-timeout", ConfigValueFactory.fromAnyRef("300 s")) + .withValue("akka.http.server.idle-timeout", ConfigValueFactory.fromAnyRef("300 s")) + ) + override def bind(port: Int)( - implicit system: ActorSystem, - materializer: Materializer + implicit system: ActorSystem, + materializer: Materializer ): Future[ServerBinding] = { import materializer.executionContext Http().bindAndHandleAsync(Route.asyncHandler(route), "localhost", port) @@ -94,8 +110,8 @@ object AkkaHttpTestWebServer { override def name: String = "bind-and-handle-sync" override def bind(port: Int)( - implicit system: ActorSystem, - materializer: Materializer + implicit system: ActorSystem, + materializer: Materializer ): Future[ServerBinding] = { Http().bindAndHandleSync(syncHandler, "localhost", port) } @@ -105,8 +121,8 @@ object AkkaHttpTestWebServer { override def name: String = "bind-and-handle-async" override def bind(port: Int)( - implicit system: ActorSystem, - materializer: Materializer + implicit system: ActorSystem, + materializer: Materializer ): Future[ServerBinding] = { import materializer.executionContext Http().bindAndHandleAsync(asyncHandler, "localhost", port) @@ -117,8 +133,8 @@ object AkkaHttpTestWebServer { 
override def name: String = "bind-and-handle-async-http2" override def bind(port: Int)( - implicit system: ActorSystem, - materializer: Materializer + implicit system: ActorSystem, + materializer: Materializer ): Future[ServerBinding] = { import materializer.executionContext val serverSettings = enableHttp2(ServerSettings(system)) @@ -134,7 +150,7 @@ object AkkaHttpTestWebServer { // This part defines the routes using the Scala routing DSL // ---------------------------------------------------------------------- // private val exceptionHandler = ExceptionHandler { - case e: Exception => + case e : Exception if !e.isInstanceOf[BlockingException] => val span = activeSpan() TraceUtils.handleException(span, e) complete( @@ -170,50 +186,140 @@ object AkkaHttpTestWebServer { private val defaultHeader = RawHeader(HttpServerTest.getIG_RESPONSE_HEADER, HttpServerTest.getIG_RESPONSE_HEADER_VALUE) + // force a rejection due to BlockingException to throw so that the error + // can be recorded in the span + private val blockingRejectionHandler: RejectionHandler = RejectionHandler.newBuilder() + .handle({ + case MalformedRequestContentRejection(_, cause: BlockingException) => + throw cause + }).result() + def route(implicit ec: ExecutionContext): Route = withController { - respondWithDefaultHeader(defaultHeader) { - get { - path(SUCCESS.relativePath) { - complete( - HttpResponse(status = SUCCESS.getStatus, entity = SUCCESS.getBody) - ) - } ~ path(FORWARDED.relativePath) { - headerValueByName("x-forwarded-for") { address => + handleRejections(blockingRejectionHandler) { + respondWithDefaultHeader(defaultHeader) { + get { + path(SUCCESS.relativePath) { complete( - HttpResponse(status = FORWARDED.getStatus, entity = address) + HttpResponse(status = SUCCESS.getStatus, entity = SUCCESS.getBody) ) - } - } ~ path( - QUERY_PARAM.relativePath | QUERY_ENCODED_BOTH.relativePath | QUERY_ENCODED_QUERY.relativePath - ) { - parameter("some") { query => - complete( - HttpResponse( - status = QUERY_PARAM.getStatus, - entity = s"some=$query" + } ~ path(FORWARDED.relativePath) { + headerValueByName("x-forwarded-for") { address => + complete( + HttpResponse(status = FORWARDED.getStatus, entity = address) + ) + } + } ~ path( + QUERY_PARAM.relativePath | QUERY_ENCODED_BOTH.relativePath | QUERY_ENCODED_QUERY.relativePath + ) { + parameter("some") { query => + complete( + HttpResponse( + status = QUERY_PARAM.getStatus, + entity = s"some=$query" + ) ) + } + } ~ path(REDIRECT.relativePath) { + redirect(Uri(REDIRECT.getBody), StatusCodes.Found) + } ~ path(USER_BLOCK.relativePath) { + Blocking.forUser("user-to-block").blockIfMatch() + complete( + HttpResponse(status = SUCCESS.getStatus, entity = "Should not be reached") ) - } - } ~ path(REDIRECT.relativePath) { - redirect(Uri(REDIRECT.getBody), StatusCodes.Found) - } ~ path(ERROR.relativePath) { - complete(HttpResponse(status = ERROR.getStatus, entity = ERROR.getBody)) - } ~ path(EXCEPTION.relativePath) { - throw new Exception(EXCEPTION.getBody) - } ~ pathPrefix("injected-id") { - path("ping" / IntNumber) { id => - val traceId = AgentTracer.activeSpan().getTraceId - complete(s"pong $id -> $traceId") - } ~ path("fing" / IntNumber) { id => - // force the response to happen on another thread or in another context - onSuccess(Future { - Thread.sleep(10); - id - }) { fid => + } ~ path(ERROR.relativePath) { + complete(HttpResponse(status = ERROR.getStatus, entity = ERROR.getBody)) + } ~ path(EXCEPTION.relativePath) { + throw new Exception(EXCEPTION.getBody) + } ~ 
pathPrefix("injected-id") { + path("ping" / IntNumber) { id => val traceId = AgentTracer.activeSpan().getTraceId - complete(s"fong $fid -> $traceId") + complete(s"pong $id -> $traceId") + } ~ path("fing" / IntNumber) { id => + // force the response to happen on another thread or in another context + onSuccess(Future { + Thread.sleep(10) + id + }) { fid => + val traceId = AgentTracer.activeSpan().getTraceId + complete(s"fong $fid -> $traceId") + } } + } ~ path(USER_BLOCK.relativePath()) { + Blocking.forUser("user-to-block").blockIfMatch() + complete(HttpResponse(status = 200, entity = "should never be reached")) } + } ~ post { + path(CREATED.relativePath()) { + entity(as[String]) { s => + complete( + HttpResponse( + status = CREATED.getStatus, + entity = s"created: $s" + ) + ) + } + } ~ + path(BODY_URLENCODED.relativePath()) { + formFieldMultiMap { m => + complete( + HttpResponse( + status = BODY_URLENCODED.getStatus, + entity = m.toStringAsGroovy + ) + ) + } + } ~ + path(BODY_JSON.relativePath()) { + parameter(Symbol("variant") ?) { + case Some("spray") => + entity(Unmarshaller.messageUnmarshallerFromEntityUnmarshaller(sprayMapUnmarshaller)) { m => + complete( + HttpResponse( + status = BODY_JSON.getStatus, + entity = SprayMapFormat.write(m).compactPrint + ) + ) + } + case _ => // jackson + entity(Unmarshaller.messageUnmarshallerFromEntityUnmarshaller(jacksonMapUnmarshaller)) { m => + complete( + HttpResponse( + status = BODY_JSON.getStatus, + entity = SprayMapFormat.write(m).compactPrint + ) + ) + } + } + } ~ + path(BODY_MULTIPART.relativePath()) { + parameter(Symbol("variant") ?) { + case Some("strictUnmarshaller") => + entity(as[Multipart.FormData.Strict]) { formData => + val m = formData.strictParts + .groupBy(_.name) + .mapValues( + _.map((bp: BodyPart.Strict) => + bp.entity.data.utf8String + ).toList + ) + complete( + HttpResponse( + status = BODY_MULTIPART.getStatus, + entity = m.toStringAsGroovy + ) + ) + } + case _ => + formFieldMultiMap { m => + complete( + HttpResponse( + status = BODY_MULTIPART.getStatus, + entity = m.toStringAsGroovy + ) + ) + } + } + } } } } @@ -223,8 +329,8 @@ object AkkaHttpTestWebServer { // ---------------------------------------------------------------------- // val syncHandler: HttpRequest => HttpResponse = { - case HttpRequest(GET, uri: Uri, _, _, _) => { - val path = uri.path.toString() + case HttpRequest(GET, uri: Uri, _, _, _) => + val path = uri.path.toString() val endpoint = HttpServerTest.ServerEndpoint.forPath(path) HttpServerTest .controller( @@ -233,20 +339,25 @@ object AkkaHttpTestWebServer { def doCall(): HttpResponse = { val resp = HttpResponse(status = endpoint.getStatus) endpoint match { - case SUCCESS => resp.withEntity(endpoint.getBody) + case SUCCESS => resp.withEntity(endpoint.getBody) case FORWARDED => resp.withEntity(endpoint.getBody) // cheating case QUERY_PARAM | QUERY_ENCODED_BOTH | QUERY_ENCODED_QUERY => resp.withEntity(uri.queryString().orNull) case REDIRECT => resp.withHeaders(headers.Location(endpoint.getBody)) - case ERROR => resp.withEntity(endpoint.getBody) + case ERROR => resp.withEntity(endpoint.getBody) case EXCEPTION => throw new Exception(endpoint.getBody) + case USER_BLOCK => { + Blocking.forUser("user-to-block").blockIfMatch() + // should never be output: + resp.withEntity("should never be reached") + } case _ => if (path.startsWith("/injected-id/")) { val groups = path.split('/') - if (groups.size == 4) { // The path starts with a / and has 3 segments + if (groups.length == 4) { // The path starts with a / and has 
3 segments val traceId = AgentTracer.activeSpan().getTraceId - val id = groups(3).toInt + val id = groups(3).toInt groups(2) match { case "ping" => return HttpResponse(entity = s"pong $id -> $traceId") @@ -263,13 +374,57 @@ object AkkaHttpTestWebServer { } ) .withDefaultHeaders(defaultHeader) - } } def asyncHandler( - implicit ec: ExecutionContext - ): HttpRequest => Future[HttpResponse] = { request => - Future { + implicit ec: ExecutionContext, + mat: Materializer + ): HttpRequest => Future[HttpResponse] = { + case request@HttpRequest(POST, uri, _, entity, _) => + val path = request.uri.path.toString + val endpoint = HttpServerTest.ServerEndpoint.forPath(path) + + endpoint match { + case CREATED => + Unmarshal(entity).to[String].map { bodyStr => + HttpResponse(status = CREATED.getStatus) + .withEntity(s"${CREATED.getBody}: $bodyStr") + } + case BODY_MULTIPART => + uri.query().get("variant") match { + case Some("strictUnmarshaller") => + val eventualStrict = Unmarshal(entity).to[FormData.Strict] + eventualStrict.map { s => + HttpResponse(status = BODY_MULTIPART.getStatus) + .withEntity(s.toStringAsGroovy) + } + case _ => + val fd = Unmarshal(entity).to[Multipart.FormData] + val eventualStrict = fd.flatMap(_.toStrict(500 millis)) + eventualStrict.map { s => + HttpResponse(status = BODY_MULTIPART.getStatus) + .withEntity(s.toStringAsGroovy) + } + } + case BODY_URLENCODED => + val eventualData = Unmarshal(entity).to[model.FormData] + eventualData.map { d => + HttpResponse(status = BODY_URLENCODED.getStatus) + .withEntity(d.toStringAsGroovy) + } + case BODY_JSON => + val unmarshaller = uri.query().get("variant") match { + case Some("spray") => sprayMapUnmarshaller + case _ => jacksonMapUnmarshaller + } + val eventualData = Unmarshal(entity).to[Map[String, String]](unmarshaller, ec, mat) + eventualData.map { d => + HttpResponse(status = BODY_URLENCODED.getStatus) + .withEntity(SprayMapFormat.write(d).compactPrint) + } + case _ => Future.successful(HttpResponse(404)) + } + case request => Future { syncHandler(request) } } @@ -279,4 +434,70 @@ object AkkaHttpTestWebServer { serverSettings.previewServerSettings.withEnableHttp2(true) serverSettings.withPreviewServerSettings(previewServerSettings) } + + implicit class MapExtensions[A](m: Iterable[(String, A)]) { + def toStringAsGroovy: String = { + def valueToString(value: Object) : String = value match { + case seq: Seq[_] => seq.map(x => valueToString(x.asInstanceOf[Object])).mkString("[", ",", "]") + case other => other.toString + } + + m.map { case (key, value) => s"$key:${valueToString(value.asInstanceOf[Object])}" } + .mkString("[", ",", "]") + } + } + + implicit class MultipartFormDataStrictExtensions(strict: Multipart.FormData.Strict) { + def toStringAsGroovy: String = + strict.strictParts + .groupBy(_.name) + .mapValues( + _.map((bp: BodyPart.Strict) => + bp.entity.data.utf8String + ).toList + ).toStringAsGroovy + } + + implicit class FormDataExtensions(formData: model.FormData) { + def toStringAsGroovy: String = formData.fields.toMultiMap.toStringAsGroovy + } + + implicit def strictMultipartFormDataUnmarshaller: FromEntityUnmarshaller[Multipart.FormData.Strict] = { + val toStrictUnmarshaller = Unmarshaller.withMaterializer[HttpEntity, HttpEntity.Strict] { + implicit ec => + implicit mat => + entity => + entity.toStrict(1000.millis) + } + val toFormDataUnmarshaller = MultipartUnmarshallers.multipartFormDataUnmarshaller + val downcastUnmarshaller = Unmarshaller.strict[Multipart.FormData, Multipart.FormData.Strict] { + case strict: 
Multipart.FormData.Strict => strict + case _ => throw new RuntimeException("Expected Strict form data at this point") + } + + toStrictUnmarshaller.andThen(toFormDataUnmarshaller).andThen(downcastUnmarshaller) + } + + val jacksonMapUnmarshaller: FromEntityUnmarshaller[Map[String,String]] = { + Jackson.unmarshaller(classOf[java.util.Map[String, String]]).asScala.map( + javaMap => { + import scala.collection.JavaConverters._ + javaMap.asScala.toMap + } + ) + } + + object SprayMapFormat extends RootJsonFormat[Map[String, String]] { + def write(map: Map[String, String]): JsObject = JsObject(map.mapValues(JsString(_)).toMap) + + def read(value: JsValue): Map[String, String] = value match { + case JsObject(fields) => fields.collect { + case (k, JsString(v)) => k -> v + } + case _ => deserializationError("Expected a JSON object") + } + } + + val sprayMapUnmarshaller: FromEntityUnmarshaller[Map[String, String]] = + SprayJsonSupport.sprayJsonUnmarshaller[Map[String, String]](SprayMapFormat) } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/lagomTest/groovy/LagomTest.groovy b/dd-java-agent/instrumentation/akka-http-10.0/src/lagomTest/groovy/LagomTest.groovy index bd01b081e8b..05dfc1d3eb9 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/lagomTest/groovy/LagomTest.groovy +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/lagomTest/groovy/LagomTest.groovy @@ -71,6 +71,9 @@ class LagomTest extends AgentTestRunner { "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 101 "$Tags.HTTP_USER_AGENT" String + "$Tags.PEER_HOST_IPV4" '127.0.0.1' + "$Tags.PEER_PORT" Integer + "$Tags.HTTP_CLIENT_IP" '127.0.0.1' defaultTags() } } @@ -115,6 +118,9 @@ class LagomTest extends AgentTestRunner { "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 500 "$Tags.HTTP_USER_AGENT" String + "$Tags.PEER_HOST_IPV4" '127.0.0.1' + "$Tags.PEER_PORT" Integer + "$Tags.HTTP_CLIENT_IP" '127.0.0.1' defaultTags() } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/latestDepTest/groovy/AkkaHttp102ServerInstrumentationTests.groovy b/dd-java-agent/instrumentation/akka-http-10.0/src/latestDepTest/groovy/AkkaHttp102ServerInstrumentationTests.groovy index 1b21641eb23..a8c4134f9f5 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/latestDepTest/groovy/AkkaHttp102ServerInstrumentationTests.groovy +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/latestDepTest/groovy/AkkaHttp102ServerInstrumentationTests.groovy @@ -24,6 +24,27 @@ class AkkaHttp102ServerInstrumentationBindSyncTest extends AkkaHttpServerInstrum HttpServer server() { return new AkkaHttpTestWebServer(AkkaHttp102TestWebServer.ServerBuilderBindSync()) } + + // we test body endpoints only on the async tests + @Override + boolean testRequestBody() { + false + } + + @Override + boolean testBodyMultipart() { + false + } + + @Override + boolean testBodyJson() { + false + } + + @Override + boolean testBodyUrlencoded() { + false + } } class AkkaHttp102ServerInstrumentationBindAsyncHttp2Test extends AkkaHttpServerInstrumentationTest { diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttp2ServerInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttp2ServerInstrumentation.java index 3c7091b6fce..e0363d1f130 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttp2ServerInstrumentation.java +++ 
b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttp2ServerInstrumentation.java @@ -9,6 +9,8 @@ import akka.stream.Materializer; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import datadog.trace.instrumentation.akkahttp.appsec.ScalaListCollectorMuzzleReferences; import net.bytebuddy.asm.Advice; import scala.Function1; import scala.concurrent.Future; @@ -38,10 +40,19 @@ public String[] helperClassNames() { packageName + ".DatadogAsyncHandlerWrapper$2", packageName + ".AkkaHttpServerHeaders", packageName + ".AkkaHttpServerDecorator", + packageName + ".RecoverFromBlockedExceptionPF", packageName + ".UriAdapter", + packageName + ".appsec.AkkaBlockResponseFunction", + packageName + ".appsec.BlockingResponseHelper", + packageName + ".appsec.ScalaListCollector", }; } + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + @Override public void adviceTransformations(AdviceTransformation transformation) { transformation.applyAdvice( @@ -70,7 +81,7 @@ public static void enter( @Advice.Argument(value = 0, readOnly = false) Function1> handler, @Advice.Argument(value = 7) final Materializer materializer) { - handler = new DatadogAsyncHandlerWrapper(handler, materializer.executionContext()); + handler = new DatadogAsyncHandlerWrapper(handler, materializer); } } @@ -80,7 +91,7 @@ public static void enter( @Advice.Argument(value = 0, readOnly = false) Function1> handler, @Advice.Argument(value = 6) final Materializer materializer) { - handler = new DatadogAsyncHandlerWrapper(handler, materializer.executionContext()); + handler = new DatadogAsyncHandlerWrapper(handler, materializer); } } @@ -90,7 +101,7 @@ public static void enter( @Advice.Argument(value = 0, readOnly = false) Function1> handler, @Advice.Argument(value = 5) final Materializer materializer) { - handler = new DatadogAsyncHandlerWrapper(handler, materializer.executionContext()); + handler = new DatadogAsyncHandlerWrapper(handler, materializer); } } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerDecorator.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerDecorator.java index 8b7290747ef..efa73e608cd 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerDecorator.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerDecorator.java @@ -1,11 +1,19 @@ package datadog.trace.instrumentation.akkahttp; +import akka.http.javadsl.model.HttpHeader; +import akka.http.javadsl.model.headers.RemoteAddress; import akka.http.scaladsl.model.HttpRequest; import akka.http.scaladsl.model.HttpResponse; +import datadog.trace.api.gateway.BlockResponseFunction; import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; import datadog.trace.bootstrap.instrumentation.api.URIDataAdapter; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.HttpServerDecorator; +import datadog.trace.instrumentation.akkahttp.appsec.AkkaBlockResponseFunction; +import java.net.InetAddress; +import java.util.Optional; +import scala.Option; +import scala.reflect.ClassTag$; public class 
AkkaHttpServerDecorator extends HttpServerDecorator { @@ -52,11 +60,26 @@ protected URIDataAdapter url(final HttpRequest httpRequest) { @Override protected String peerHostIP(final HttpRequest httpRequest) { + Option header = httpRequest.header(ClassTag$.MODULE$.apply(RemoteAddress.class)); + if (!header.isEmpty()) { + RemoteAddress httpHeader = (RemoteAddress) header.get(); + akka.http.javadsl.model.RemoteAddress remAddress = httpHeader.address(); + Optional address = remAddress.getAddress(); + if (address.isPresent()) { + return address.get().getHostAddress(); + } + } return null; } @Override protected int peerPort(final HttpRequest httpRequest) { + Option header = httpRequest.header(ClassTag$.MODULE$.apply(RemoteAddress.class)); + if (!header.isEmpty()) { + RemoteAddress httpHeader = (RemoteAddress) header.get(); + akka.http.javadsl.model.RemoteAddress address = httpHeader.address(); + return address.getPort(); + } return 0; } @@ -64,4 +87,15 @@ protected int peerPort(final HttpRequest httpRequest) { protected int status(final HttpResponse httpResponse) { return httpResponse.status().intValue(); } + + @Override + protected boolean isAppSecOnResponseSeparate() { + return true; + } + + @Override + protected BlockResponseFunction createBlockResponseFunction( + HttpRequest httpRequest, HttpRequest httpRequest2) { + return new AkkaBlockResponseFunction(httpRequest); + } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerHeaders.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerHeaders.java index 0ddaaeb9ae9..c5aabac9926 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerHeaders.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerHeaders.java @@ -1,34 +1,69 @@ package datadog.trace.instrumentation.akkahttp; import akka.http.javadsl.model.HttpHeader; +import akka.http.javadsl.model.headers.RawRequestURI; +import akka.http.javadsl.model.headers.RemoteAddress; +import akka.http.javadsl.model.headers.TimeoutAccess; +import akka.http.scaladsl.model.ContentType; +import akka.http.scaladsl.model.HttpEntity; import akka.http.scaladsl.model.HttpMessage; import akka.http.scaladsl.model.HttpRequest; import akka.http.scaladsl.model.HttpResponse; import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; -public class AkkaHttpServerHeaders - implements AgentPropagation.ContextVisitor { +public class AkkaHttpServerHeaders { + private AkkaHttpServerHeaders() {} - @SuppressWarnings("rawtypes") - private static final AkkaHttpServerHeaders GETTER = new AkkaHttpServerHeaders(); + private static final AgentPropagation.ContextVisitor GETTER_REQUEST = + AkkaHttpServerHeaders::forEachKeyRequest; + private static final AgentPropagation.ContextVisitor GETTER_RESPONSE = + AkkaHttpServerHeaders::forEachKeyResponse; - @SuppressWarnings("unchecked") public static AgentPropagation.ContextVisitor requestGetter() { - return (AgentPropagation.ContextVisitor) GETTER; + return GETTER_REQUEST; } - @SuppressWarnings("unchecked") public static AgentPropagation.ContextVisitor responseGetter() { - return (AgentPropagation.ContextVisitor) GETTER; + return GETTER_RESPONSE; } - @Override - public void forEachKey( - final HttpMessage carrier, final AgentPropagation.KeyClassifier classifier) { + private static void doForEachKey( + HttpMessage 
carrier, + akka.http.javadsl.model.HttpEntity entity, + AgentPropagation.KeyClassifier classifier) { + if (entity instanceof HttpEntity.Strict) { + HttpEntity.Strict strictEntity = (HttpEntity.Strict) entity; + ContentType contentType = strictEntity.contentType(); + if (contentType != null) { + if (!classifier.accept("content-type", contentType.value())) { + return; + } + } + if (!classifier.accept("content-length", Long.toString(strictEntity.contentLength()))) { + return; + } + } + for (final HttpHeader header : carrier.getHeaders()) { + // skip synthetic headers + if (header instanceof RemoteAddress + || header instanceof TimeoutAccess + || header instanceof RawRequestURI) { + continue; + } if (!classifier.accept(header.lowercaseName(), header.value())) { return; } } } + + private static void forEachKeyRequest( + HttpRequest req, AgentPropagation.KeyClassifier classifier) { + doForEachKey(req, req.entity(), classifier); + } + + private static void forEachKeyResponse( + final HttpResponse resp, final AgentPropagation.KeyClassifier classifier) { + doForEachKey(resp, resp.entity(), classifier); + } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerInstrumentation.java index 6984362b902..363c3b26805 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/AkkaHttpServerInstrumentation.java @@ -11,6 +11,8 @@ import akka.stream.scaladsl.Flow; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import datadog.trace.instrumentation.akkahttp.appsec.ScalaListCollectorMuzzleReferences; import net.bytebuddy.asm.Advice; /** @@ -71,9 +73,18 @@ public String[] helperClassNames() { packageName + ".AkkaHttpServerHeaders", packageName + ".AkkaHttpServerDecorator", packageName + ".UriAdapter", + packageName + ".RecoverFromBlockedExceptionPF", + packageName + ".appsec.BlockingResponseHelper", + packageName + ".appsec.ScalaListCollector", + packageName + ".appsec.AkkaBlockResponseFunction", }; } + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + @Override public void adviceTransformations(AdviceTransformation transformation) { transformation.applyAdvice( @@ -87,6 +98,7 @@ public static void enter( @Advice.Argument(value = 0, readOnly = false) Flow handler, @Advice.Argument(value = 4, readOnly = false) ServerSettings settings) { + handler = handler.asJava().recover(RecoverFromBlockedExceptionPF.INSTANCE).asScala(); final BidiFlow wrapper = BidiFlow.fromGraph(new DatadogServerRequestResponseFlowWrapper(settings)); handler = wrapper.reversed().join(handler.asJava()).asScala(); diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogAsyncHandlerWrapper.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogAsyncHandlerWrapper.java index c59577694d8..56f0e5c44a0 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogAsyncHandlerWrapper.java +++ 
b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogAsyncHandlerWrapper.java @@ -2,52 +2,81 @@ import akka.http.scaladsl.model.HttpRequest; import akka.http.scaladsl.model.HttpResponse; +import akka.http.scaladsl.util.FastFuture$; +import akka.stream.Materializer; +import datadog.trace.api.gateway.Flow; import datadog.trace.bootstrap.instrumentation.api.AgentScope; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.instrumentation.akkahttp.appsec.BlockingResponseHelper; import scala.Function1; -import scala.concurrent.ExecutionContext; import scala.concurrent.Future; import scala.runtime.AbstractFunction1; public class DatadogAsyncHandlerWrapper extends AbstractFunction1> { private final Function1> userHandler; - private final ExecutionContext executionContext; + private final Materializer materializer; public DatadogAsyncHandlerWrapper( final Function1> userHandler, - final ExecutionContext executionContext) { + final Materializer materializer) { this.userHandler = userHandler; - this.executionContext = executionContext; + this.materializer = materializer; } @Override public Future apply(final HttpRequest request) { final AgentScope scope = DatadogWrapperHelper.createSpan(request); - Future futureResponse = null; + AgentSpan span = scope.span(); + Future futureResponse; + + // handle blocking in the beginning of the request + Flow.Action.RequestBlockingAction rba; + if ((rba = span.getRequestBlockingAction()) != null) { + request.discardEntityBytes(materializer); + HttpResponse response = BlockingResponseHelper.maybeCreateBlockingResponse(rba, request); + span.getRequestContext().getTraceSegment().effectivelyBlocked(); + DatadogWrapperHelper.finishSpan(span, response); + return FastFuture$.MODULE$.successful().apply(response); + } + try { futureResponse = userHandler.apply(request); } catch (final Throwable t) { scope.close(); - DatadogWrapperHelper.finishSpan(scope.span(), t); + DatadogWrapperHelper.finishSpan(span, t); throw t; } + final Future wrapped = - futureResponse.transform( - new AbstractFunction1() { - @Override - public HttpResponse apply(final HttpResponse response) { - DatadogWrapperHelper.finishSpan(scope.span(), response); - return response; - } - }, - new AbstractFunction1() { - @Override - public Throwable apply(final Throwable t) { - DatadogWrapperHelper.finishSpan(scope.span(), t); - return t; - } - }, - executionContext); + futureResponse + .recoverWith( + RecoverFromBlockedExceptionPF.INSTANCE_FUTURE, materializer.executionContext()) + .transform( + new AbstractFunction1() { + @Override + public HttpResponse apply(HttpResponse response) { + // handle blocking at the middle/end of the request + HttpResponse newResponse = + BlockingResponseHelper.handleFinishForWaf(span, response); + if (newResponse != response) { + span.getRequestContext().getTraceSegment().effectivelyBlocked(); + response.entity().discardBytes(materializer); + response = newResponse; + } + + DatadogWrapperHelper.finishSpan(span, response); + return response; + } + }, + new AbstractFunction1() { + @Override + public Throwable apply(final Throwable t) { + DatadogWrapperHelper.finishSpan(span, t); + return t; + } + }, + materializer.executionContext()); scope.close(); return wrapped; } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogServerRequestResponseFlowWrapper.java 
b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogServerRequestResponseFlowWrapper.java index f168a91ba62..45ce4b88c00 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogServerRequestResponseFlowWrapper.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DatadogServerRequestResponseFlowWrapper.java @@ -13,7 +13,10 @@ import akka.stream.stage.AbstractOutHandler; import akka.stream.stage.GraphStage; import akka.stream.stage.GraphStageLogic; +import datadog.trace.api.gateway.RequestContext; import datadog.trace.bootstrap.instrumentation.api.AgentScope; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.instrumentation.akkahttp.appsec.BlockingResponseHelper; import java.util.Queue; import java.util.concurrent.ArrayBlockingQueue; @@ -54,6 +57,7 @@ public GraphStageLogic createLogic(final Attributes inheritedAttributes) throws // close the span at the front of the queue when we receive the response // from the user code, since it will match up to the request for that span. final Queue scopes = new ArrayBlockingQueue<>(pipeliningLimit); + boolean[] skipNextPull = new boolean[] {false}; // This is where the request comes in from the server and TCP layer setHandler( @@ -63,6 +67,23 @@ public GraphStageLogic createLogic(final Attributes inheritedAttributes) throws public void onPush() throws Exception { final HttpRequest request = grab(requestInlet); final AgentScope scope = DatadogWrapperHelper.createSpan(request); + AgentSpan span = scope.span(); + RequestContext requestContext = span.getRequestContext(); + if (requestContext != null) { + HttpResponse response = + BlockingResponseHelper.maybeCreateBlockingResponse(span, request); + if (response != null) { + request.discardEntityBytes(materializer()); + skipNextPull[0] = true; + requestContext.getTraceSegment().effectivelyBlocked(); + emit(responseOutlet, response); + DatadogWrapperHelper.finishSpan(scope.span(), response); + pull(requestInlet); + scope.close(); + return; + } + } + scopes.add(scope); push(requestOutlet, request); // Since we haven't instrumented the akka stream state machine, we can't rely @@ -109,10 +130,18 @@ public void onDownstreamFinish() throws Exception { new AbstractInHandler() { @Override public void onPush() throws Exception { - final HttpResponse response = grab(responseInlet); + HttpResponse response = grab(responseInlet); final AgentScope scope = scopes.poll(); if (scope != null) { - DatadogWrapperHelper.finishSpan(scope.span(), response); + AgentSpan span = scope.span(); + HttpResponse newResponse = + BlockingResponseHelper.handleFinishForWaf(span, response); + if (newResponse != response) { + span.getRequestContext().getTraceSegment().effectivelyBlocked(); + response.discardEntityBytes(materializer()); + response = newResponse; + } + DatadogWrapperHelper.finishSpan(span, response); // Check if the active scope is still the scope from when the request came in, // and close it. 
If it's not, then it will be cleaned up actor message // processing instrumentation that drives this state machine @@ -160,7 +189,17 @@ public void onUpstreamFailure(final Throwable ex) throws Exception { new AbstractOutHandler() { @Override public void onPull() throws Exception { - pull(responseInlet); + if (isClosed(responseInlet)) { + fail(responseOutlet, new RuntimeException("Failed earlier")); + } + // condition is needed when we emit() directly to the outlet + // The value was not pushed through the response inlet, so we need not + // request more data through the inlet + if (skipNextPull[0]) { + skipNextPull[0] = false; + } else { + pull(responseInlet); + } } @Override diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DefaultExceptionHandlerInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DefaultExceptionHandlerInstrumentation.java new file mode 100644 index 00000000000..5eaabbf87dd --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/DefaultExceptionHandlerInstrumentation.java @@ -0,0 +1,51 @@ +package datadog.trace.instrumentation.akkahttp; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import akka.http.scaladsl.server.ExceptionHandler; +import akka.http.scaladsl.server.ExceptionHandler$; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import net.bytebuddy.asm.Advice; + +@AutoService(Instrumenter.class) +public class DefaultExceptionHandlerInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForSingleType { + public DefaultExceptionHandlerInstrumentation() { + super("akka-http", "akka-http-server"); + } + + @Override + public String instrumentedType() { + return "akka.http.scaladsl.server.ExceptionHandler$"; + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".MarkSpanAsErroredPF", + }; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isMethod() + .and(returns(named("akka.http.scaladsl.server.ExceptionHandler"))) + .and(takesArguments(1)) + .and(takesArgument(0, named("akka.http.scaladsl.settings.RoutingSettings"))), + DefaultExceptionHandlerInstrumentation.class.getName() + "$DefaultHandlerAdvice"); + } + + static class DefaultHandlerAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after( + @Advice.This ExceptionHandler$ eh, @Advice.Return(readOnly = false) ExceptionHandler ret) { + ret = eh.apply(MarkSpanAsErroredPF.INSTANCE).withFallback(ret); + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/MarkSpanAsErroredPF.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/MarkSpanAsErroredPF.java new file mode 100644 index 00000000000..5709c5025cd --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/MarkSpanAsErroredPF.java @@ -0,0 +1,31 @@ +package datadog.trace.instrumentation.akkahttp; + +import 
akka.http.scaladsl.server.RequestContext; +import akka.http.scaladsl.server.RouteResult; +import akka.japi.JavaPartialFunction; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import scala.Function1; +import scala.concurrent.Future; + +/** + * Runs before the default exception handler in {@link + * akka.http.scaladsl.server.ExceptionHandler$#default}, which usually completes with a 500, that + * the exception may be recorded. + */ +public class MarkSpanAsErroredPF + extends JavaPartialFunction>> { + public static final JavaPartialFunction INSTANCE = new MarkSpanAsErroredPF(); + + private MarkSpanAsErroredPF() {} + + @Override + public Function1> apply(Throwable x, boolean isCheck) + throws Exception, Exception { + AgentSpan agentSpan = AgentTracer.activeSpan(); + if (agentSpan != null) { + agentSpan.addThrowable(x); + } + throw noMatch(); + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/RecoverFromBlockedExceptionPF.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/RecoverFromBlockedExceptionPF.java new file mode 100644 index 00000000000..96bfb55194d --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/RecoverFromBlockedExceptionPF.java @@ -0,0 +1,52 @@ +package datadog.trace.instrumentation.akkahttp; + +import akka.http.scaladsl.model.HttpEntity$; +import akka.http.scaladsl.model.HttpProtocols; +import akka.http.scaladsl.model.HttpResponse; +import akka.http.scaladsl.model.StatusCode; +import akka.http.scaladsl.util.FastFuture$; +import akka.japi.JavaPartialFunction; +import datadog.appsec.api.blocking.BlockingException; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import scala.PartialFunction; +import scala.collection.immutable.List$; +import scala.compat.java8.JFunction1; +import scala.concurrent.Future; + +public class RecoverFromBlockedExceptionPF extends JavaPartialFunction { + public static final PartialFunction INSTANCE = + new RecoverFromBlockedExceptionPF(); + public static final PartialFunction> INSTANCE_FUTURE; + + static { + JFunction1> f = RecoverFromBlockedExceptionPF::valueToFuture; + INSTANCE_FUTURE = INSTANCE.andThen(f); + } + + @Override + public HttpResponse apply(Throwable x, boolean isCheck) throws Exception { + if (x instanceof BlockingException) { + if (isCheck) { + return null; + } + AgentSpan agentSpan = AgentTracer.activeSpan(); + if (agentSpan != null) { + agentSpan.addThrowable(x); + } + + // will be replaced anyway + return new HttpResponse( + StatusCode.int2StatusCode(500), + List$.MODULE$.empty(), + HttpEntity$.MODULE$.Empty(), + HttpProtocols.HTTP$div1$u002E1()); + } else { + throw noMatch(); + } + } + + private static Future valueToFuture(V value) { + return FastFuture$.MODULE$.successful().apply(value); + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/AkkaBlockResponseFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/AkkaBlockResponseFunction.java new file mode 100644 index 00000000000..7421a8f9e01 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/AkkaBlockResponseFunction.java @@ -0,0 
+1,69 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import akka.http.scaladsl.model.HttpRequest; +import akka.http.scaladsl.model.HttpResponse; +import datadog.appsec.api.blocking.BlockingContentType; +import datadog.trace.api.gateway.BlockResponseFunction; +import datadog.trace.api.gateway.Flow; +import datadog.trace.api.internal.TraceSegment; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import java.util.Map; + +/** + * This block response function only saves the request blocking action. Usually the blocking request + * function directly commits a response. + * + * @see BlockingResponseHelper#handleFinishForWaf(AgentSpan, HttpResponse) + */ +public class AkkaBlockResponseFunction implements BlockResponseFunction { + private final HttpRequest request; + private Flow.Action.RequestBlockingAction rba; + private boolean unmarshallBlock; + private TraceSegment traceSegment; + + public AkkaBlockResponseFunction(HttpRequest request) { + this.request = request; + } + + public boolean isBlocking() { + return rba != null; + } + + public boolean isUnmarshallBlock() { + return unmarshallBlock; + } + + public void setUnmarshallBlock(boolean unmarshallBlock) { + this.unmarshallBlock = unmarshallBlock; + } + + public HttpResponse maybeCreateAlternativeResponse() { + if (!isBlocking()) { + return null; + } + + HttpResponse httpResponse = BlockingResponseHelper.maybeCreateBlockingResponse(rba, request); + if (httpResponse != null) { + traceSegment.effectivelyBlocked(); + } + return httpResponse; + } + + @Override + public boolean tryCommitBlockingResponse( + TraceSegment segment, + int statusCode, + BlockingContentType templateType, + Map extraHeaders) { + AgentSpan agentSpan = AgentTracer.activeSpan(); + if (agentSpan == null) { + return false; + } + if (rba == null) { + rba = new Flow.Action.RequestBlockingAction(statusCode, templateType, extraHeaders); + this.traceSegment = segment; + } + return true; + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/BlockingResponseHelper.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/BlockingResponseHelper.java new file mode 100644 index 00000000000..046d6c96bdb --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/BlockingResponseHelper.java @@ -0,0 +1,107 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator.DECORATE; + +import akka.http.javadsl.model.HttpHeader; +import akka.http.javadsl.model.headers.RawHeader; +import akka.http.scaladsl.model.ContentTypes; +import akka.http.scaladsl.model.HttpEntity$; +import akka.http.scaladsl.model.HttpRequest; +import akka.http.scaladsl.model.HttpResponse; +import akka.http.scaladsl.model.ResponseEntity; +import akka.http.scaladsl.model.StatusCode; +import akka.http.scaladsl.model.StatusCodes; +import akka.util.ByteString; +import datadog.appsec.api.blocking.BlockingContentType; +import datadog.trace.api.gateway.BlockResponseFunction; +import datadog.trace.api.gateway.Flow; +import datadog.trace.api.gateway.RequestContext; +import datadog.trace.bootstrap.blocking.BlockingActionHelper; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders; +import 
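// ---------------------------------------------------------------------------
// Note on the JavaPartialFunction pattern used by MarkSpanAsErroredPF and
// RecoverFromBlockedExceptionPF above: akka.japi.JavaPartialFunction has a two-phase
// contract. apply(value, isCheck) is invoked with isCheck == true when the caller only
// probes whether the function is defined for the input (the return value is discarded),
// and throwing noMatch() marks it as "not defined", so recover()/recoverWith() leave the
// original failure untouched. A minimal, self-contained sketch of that contract, using a
// hypothetical exception type and recovery value (not code from this patch):

import akka.japi.JavaPartialFunction;

final class FallbackOnIllegalState extends JavaPartialFunction<Throwable, String> {
  @Override
  public String apply(Throwable t, boolean isCheck) {
    if (!(t instanceof IllegalStateException)) {
      throw noMatch(); // not defined for this throwable: the original failure propagates
    }
    if (isCheck) {
      return null; // probe phase: only definedness matters, the value is ignored
    }
    return "fallback"; // recovery value used when the partial function is actually applied
  }
}

// Typical use mirrors DatadogAsyncHandlerWrapper above: for a Scala Future one would call
// future.recover(new FallbackOnIllegalState(), executionContext), which swaps an
// IllegalStateException failure for "fallback" and passes every other outcome through.
// ---------------------------------------------------------------------------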
java.util.Optional; +import scala.collection.immutable.List; + +public class BlockingResponseHelper { + private BlockingResponseHelper() {} + + public static HttpResponse handleFinishForWaf(final AgentSpan span, final HttpResponse response) { + RequestContext requestContext = span.getRequestContext(); + BlockResponseFunction brf = requestContext.getBlockResponseFunction(); + if (brf instanceof AkkaBlockResponseFunction) { + HttpResponse altResponse = ((AkkaBlockResponseFunction) brf).maybeCreateAlternativeResponse(); + if (altResponse != null) { + // we already blocked during the request + return altResponse; + } + } + Flow flow = + DECORATE.callIGCallbackResponseAndHeaders( + span, response, response.status().intValue(), AkkaHttpServerHeaders.responseGetter()); + Flow.Action action = flow.getAction(); + if (action instanceof Flow.Action.RequestBlockingAction) { + Flow.Action.RequestBlockingAction rba = (Flow.Action.RequestBlockingAction) action; + if (brf instanceof AkkaBlockResponseFunction) { + brf.tryCommitBlockingResponse( + requestContext.getTraceSegment(), + rba.getStatusCode(), + rba.getBlockingContentType(), + rba.getExtraHeaders()); + HttpResponse altResponse = + ((AkkaBlockResponseFunction) brf).maybeCreateAlternativeResponse(); + if (altResponse != null) { + return altResponse; + } + } + } + + return response; + } + + public static HttpResponse maybeCreateBlockingResponse(AgentSpan span, HttpRequest request) { + return maybeCreateBlockingResponse(span.getRequestBlockingAction(), request); + } + + public static HttpResponse maybeCreateBlockingResponse( + Flow.Action.RequestBlockingAction rba, HttpRequest request) { + if (rba == null) { + return null; + } + Optional accept = request.getHeader("accept"); + BlockingContentType bct = rba.getBlockingContentType(); + int httpCode = BlockingActionHelper.getHttpCode(rba.getStatusCode()); + ResponseEntity entity; + if (bct != BlockingContentType.NONE) { + BlockingActionHelper.TemplateType tt = + BlockingActionHelper.determineTemplateType(bct, accept.map(h -> h.value()).orElse(null)); + byte[] template = BlockingActionHelper.getTemplate(tt); + if (tt == BlockingActionHelper.TemplateType.HTML) { + entity = + HttpEntity$.MODULE$.apply( + ContentTypes.text$divhtml$u0028UTF$minus8$u0029(), ByteString.fromArray(template)); + } else { // json + entity = + HttpEntity$.MODULE$.apply( + ContentTypes.application$divjson(), ByteString.fromArray(template)); + } + } else { + entity = HttpEntity$.MODULE$.Empty(); + } + + List headersList = + rba.getExtraHeaders().entrySet().stream() + .map( + e -> + (akka.http.scaladsl.model.HttpHeader) + RawHeader.create(e.getKey(), e.getValue())) + .collect(ScalaListCollector.toScalaList()); + + StatusCode code; + try { + code = StatusCode.int2StatusCode(httpCode); + } catch (RuntimeException e) { + code = StatusCodes.custom(httpCode, "Request Blocked", "", false, true); + } + return HttpResponse.apply(code, headersList, entity, request.protocol()); + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/Bug4304Instrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/Bug4304Instrumentation.java new file mode 100644 index 00000000000..22c4f2bbba1 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/Bug4304Instrumentation.java @@ -0,0 +1,113 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import 
static datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers.declaresField; +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.nameStartsWith; +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan; +import static net.bytebuddy.matcher.ElementMatchers.isConstructor; + +import akka.stream.stage.GraphStageLogic; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers; +import datadog.trace.agent.tooling.muzzle.Reference; +import datadog.trace.api.gateway.BlockResponseFunction; +import datadog.trace.api.gateway.RequestContext; +import datadog.trace.api.gateway.RequestContextSlot; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import java.lang.reflect.Field; +import java.util.regex.Pattern; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; + +/** See https://github.com/akka/akka-http/issues/4304 */ +@AutoService(Instrumenter.class) +public class Bug4304Instrumentation extends Instrumenter.AppSec + implements Instrumenter.ForTypeHierarchy, Instrumenter.WithTypeStructure { + public Bug4304Instrumentation() { + super("akka-http"); + } + + @Override + public String hierarchyMarkerType() { + return "akka.http.impl.engine.server.HttpServerBluePrint"; + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + "datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + + @Override + public ElementMatcher hierarchyMatcher() { + return nameStartsWith("akka.http.impl.engine.server.HttpServerBluePrint$ControllerStage$$anon$") + .and(HierarchyMatchers.extendsClass(named("akka.stream.stage.GraphStageLogic"))) + .and(MatchesOneHundredContinueStageAnonClass.INSTANCE); + } + + public static class MatchesOneHundredContinueStageAnonClass + implements ElementMatcher { + public static final ElementMatcher INSTANCE = + new MatchesOneHundredContinueStageAnonClass(); + + private MatchesOneHundredContinueStageAnonClass() {} + + private static final Pattern ANON_CLASS_PATTERN = + Pattern.compile( + "akka\\.http\\.impl\\.engine\\.server\\.HttpServerBluePrint\\$ControllerStage\\$\\$anon\\$" + + "\\d+\\$OneHundredContinueStage\\$\\$anon\\$\\d+"); + + @Override + public boolean matches(TypeDescription td) { + return ANON_CLASS_PATTERN.matcher(td.getName()).matches(); + } + } + + @Override + public ElementMatcher structureMatcher() { + return declaresField(named("oneHundredContinueSent")); + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isConstructor(), Bug4304Instrumentation.class.getName() + "$GraphStageLogicAdvice"); + } + + static class GraphStageLogicAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after(@Advice.This GraphStageLogic thiz) + throws NoSuchFieldException, IllegalAccessException { + AgentSpan span = activeSpan(); + RequestContext 
reqCtx; + if (span == null + || (reqCtx = span.getRequestContext()) == null + || reqCtx.getData(RequestContextSlot.APPSEC) == null) { + return; + } + + BlockResponseFunction brf = reqCtx.getBlockResponseFunction(); + if (brf instanceof AkkaBlockResponseFunction) { + AkkaBlockResponseFunction abrf = (AkkaBlockResponseFunction) brf; + if (abrf.isBlocking() && abrf.isUnmarshallBlock()) { + Field f = thiz.getClass().getDeclaredField("oneHundredContinueSent"); + f.setAccessible(true); + f.set(thiz, true); + } + } + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ConfigProvideRemoteAddressHeaderInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ConfigProvideRemoteAddressHeaderInstrumentation.java new file mode 100644 index 00000000000..db05a1d73f1 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ConfigProvideRemoteAddressHeaderInstrumentation.java @@ -0,0 +1,52 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.isPublic; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import net.bytebuddy.asm.Advice; + +@AutoService(Instrumenter.class) +public class ConfigProvideRemoteAddressHeaderInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForSingleType { + public ConfigProvideRemoteAddressHeaderInstrumentation() { + super("akka-http"); + } + + @Override + public String instrumentedType() { + return "com.typesafe.config.impl.SimpleConfig"; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isPublic() + .and(named("getBoolean")) + .and(takesArguments(1)) + .and(takesArgument(0, String.class)) + .and(returns(boolean.class)), + ConfigProvideRemoteAddressHeaderInstrumentation.class.getName() + + "$EnableRemoteAddressHeaderAdvice"); + } + + static class EnableRemoteAddressHeaderAdvice { + @Advice.OnMethodEnter(suppress = Throwable.class, skipOn = Advice.OnNonDefaultValue.class) + static boolean enter(@Advice.Argument(0) String configName) { + // ideally we'd use remote-address-attribute, but that's only available on 10.2, + // and doesn't work on http/2 until 10.2.3 + return "remote-address-header".equals(configName); + } + + @Advice.OnMethodExit(suppress = Throwable.class) + static void exit(@Advice.Enter boolean enter, @Advice.Return(readOnly = false) boolean ret) { + if (enter) { + ret = true; + } + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/FormDataToStrictInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/FormDataToStrictInstrumentation.java new file mode 100644 index 00000000000..85904233c11 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/FormDataToStrictInstrumentation.java @@ -0,0 +1,66 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static 
datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.isStatic; +import static net.bytebuddy.matcher.ElementMatchers.not; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import akka.stream.Materializer; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import net.bytebuddy.asm.Advice; +import scala.concurrent.duration.FiniteDuration; + +/** @see akka.http.scaladsl.model.Multipart.FormData#toStrict(FiniteDuration, Materializer) */ +@AutoService(Instrumenter.class) +public class FormDataToStrictInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForSingleType, ScalaListCollectorMuzzleReferences { + public FormDataToStrictInstrumentation() { + super("akka-http"); + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".UnmarshallerHelpers", + packageName + ".UnmarshallerHelpers$UnmarkStrictFormOngoingOnUnsupportedException", + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + "datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public String instrumentedType() { + return "akka.http.scaladsl.model.Multipart$FormData"; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isMethod() + .and(not(isStatic())) + .and(named("toStrict")) + .and(takesArguments(2)) + .and(takesArgument(0, named("scala.concurrent.duration.FiniteDuration"))) + .and(takesArgument(1, named("akka.stream.Materializer"))) + .and(returns(named("scala.concurrent.Future"))), + FormDataToStrictInstrumentation.class.getName() + "$ToStrictAdvice"); + } + + static class ToStrictAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void before( + @Advice.Return(readOnly = false) + scala.concurrent.Future fut, + @Advice.Argument(1) Materializer mat) { + fut = UnmarshallerHelpers.transformMultiPartFormDataToStrictFuture(fut, mat); + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/JacksonUnmarshallerInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/JacksonUnmarshallerInstrumentation.java new file mode 100644 index 00000000000..c63e7e4bcaa --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/JacksonUnmarshallerInstrumentation.java @@ -0,0 +1,67 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.isStatic; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import akka.http.javadsl.unmarshalling.Unmarshaller; +import com.google.auto.service.AutoService; +import 
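// ---------------------------------------------------------------------------
// Note on ConfigProvideRemoteAddressHeaderInstrumentation above: forcing
// SimpleConfig.getBoolean("akka.http.server.remote-address-header") to return true is
// roughly what a user would get by enabling the setting themselves, as in the sketch
// below (hypothetical helper name, not part of this patch). With the setting on,
// akka-http attaches the synthetic Remote-Address header that
// AkkaHttpServerDecorator.peerHostIP/peerPort read.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public final class RemoteAddressHeaderConfig {
  private RemoteAddressHeaderConfig() {}

  // Layer the setting on top of whatever application.conf already provides.
  public static Config withRemoteAddressHeader() {
    return ConfigFactory.parseString("akka.http.server.remote-address-header = on")
        .withFallback(ConfigFactory.load());
  }
}
// ---------------------------------------------------------------------------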
datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import net.bytebuddy.asm.Advice; + +@AutoService(Instrumenter.class) +public class JacksonUnmarshallerInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForSingleType { + + public JacksonUnmarshallerInstrumentation() { + super("akka-http"); + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".UnmarshallerHelpers", + packageName + ".UnmarshallerHelpers$UnmarkStrictFormOngoingOnUnsupportedException", + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + "datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + + @Override + public String instrumentedType() { + return "akka.http.javadsl.marshallers.jackson.Jackson"; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isMethod() + .and(isStatic()) + .and(returns(named("akka.http.javadsl.unmarshalling.Unmarshaller"))) + .and(named("byteStringUnmarshaller").or(named("unmarshaller"))) + .and(takesArguments(2)) + .and(takesArgument(0, named("com.fasterxml.jackson.databind.ObjectMapper"))) + .and(takesArgument(1, Class.class)), + JacksonUnmarshallerInstrumentation.class.getName() + "$UnmarshallerAdvice"); + } + + static class UnmarshallerAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after(@Advice.Return(readOnly = false) Unmarshaller ret) { + ret = UnmarshallerHelpers.transformJacksonUnmarshaller(ret); + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/MultipartUnmarshallersInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/MultipartUnmarshallersInstrumentation.java new file mode 100644 index 00000000000..ab6e90c1517 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/MultipartUnmarshallersInstrumentation.java @@ -0,0 +1,64 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static datadog.trace.agent.tooling.bytebuddy.matcher.ScalaTraitMatchers.isTraitMethod; +import static net.bytebuddy.matcher.ElementMatchers.returns; + +import akka.http.scaladsl.unmarshalling.MultipartUnmarshallers; +import akka.http.scaladsl.unmarshalling.Unmarshaller; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import net.bytebuddy.asm.Advice; + +/** @see MultipartUnmarshallers */ +@AutoService(Instrumenter.class) +public class MultipartUnmarshallersInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForKnownTypes { + + private static final String TRAIT_NAME = + "akka.http.scaladsl.unmarshalling.MultipartUnmarshallers"; + + public MultipartUnmarshallersInstrumentation() { + super("akka-http"); + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".UnmarshallerHelpers", + packageName + 
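// ---------------------------------------------------------------------------
// Note on ScalaListCollector (listed in the helperClassNames above and defined at the end
// of this patch section): it is a java.util.stream.Collector that accumulates elements
// into a scala.collection.immutable.List. The real helper resolves ListBuffer's += / ++=
// (compiled as $plus$eq / $plus$plus$eq) through MethodHandles, presumably so one agent
// artifact works across Scala collection versions. A much simplified sketch, assuming
// compilation against a single Scala 2.x version and using a hypothetical class name
// (this is not the patch's implementation):

import java.util.stream.Collector;
import scala.collection.immutable.List;
import scala.collection.mutable.ListBuffer;

public final class SimpleScalaListCollector {
  private SimpleScalaListCollector() {}

  // Accumulate into a mutable ListBuffer, then freeze it into an immutable scala List.
  public static <T> Collector<T, ListBuffer<T>, List<T>> toScalaList() {
    return Collector.of(
        ListBuffer::new,                   // supplier: empty buffer
        (buf, elem) -> buf.$plus$eq(elem), // accumulator: buf += elem
        (left, right) -> {                 // combiner (only used by parallel streams)
          left.$plus$plus$eq(right);       // left ++= right
          return left;
        },
        ListBuffer::toList);               // finisher: immutable List
  }
}

// BlockingResponseHelper above ends its stream over the extra blocking headers with
// .collect(ScalaListCollector.toScalaList()) because HttpResponse.apply expects an
// immutable Scala sequence of headers rather than a java.util.List.
// ---------------------------------------------------------------------------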
".UnmarshallerHelpers$UnmarkStrictFormOngoingOnUnsupportedException", + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + "datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public String[] knownMatchingTypes() { + return new String[] { + TRAIT_NAME, TRAIT_NAME + "$class", + }; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isTraitMethod( + TRAIT_NAME, + "multipartFormDataUnmarshaller", + "akka.event.LoggingAdapter", + "akka.http.scaladsl.settings.ParserSettings") + .and(returns(named("akka.http.scaladsl.unmarshalling.Unmarshaller"))), + MultipartUnmarshallersInstrumentation.class.getName() + "$UnmarshallerWrappingAdvice"); + } + + static class UnmarshallerWrappingAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after(@Advice.Return(readOnly = false) Unmarshaller unmarshaller) { + unmarshaller = UnmarshallerHelpers.transformMultipartFormDataUnmarshaller(unmarshaller); + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/PredefinedFromEntityUnmarshallersInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/PredefinedFromEntityUnmarshallersInstrumentation.java new file mode 100644 index 00000000000..ab01b6c94a3 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/PredefinedFromEntityUnmarshallersInstrumentation.java @@ -0,0 +1,89 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.namedOneOf; +import static datadog.trace.agent.tooling.bytebuddy.matcher.ScalaTraitMatchers.isTraitMethod; +import static net.bytebuddy.matcher.ElementMatchers.returns; + +import akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers; +import akka.http.scaladsl.unmarshalling.Unmarshaller; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import net.bytebuddy.asm.Advice; +import scala.collection.Seq; + +/** + * @see PredefinedFromEntityUnmarshallers#urlEncodedFormDataUnmarshaller(Seq) + * @see PredefinedFromEntityUnmarshallers#stringUnmarshaller() + */ +@AutoService(Instrumenter.class) +public class PredefinedFromEntityUnmarshallersInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForKnownTypes { + + private static final String TRAIT_NAME = + "akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers"; + + public PredefinedFromEntityUnmarshallersInstrumentation() { + super("akka-http"); + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".UnmarshallerHelpers", + packageName + ".UnmarshallerHelpers$UnmarkStrictFormOngoingOnUnsupportedException", + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + 
"datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + + @Override + public String[] knownMatchingTypes() { + return new String[] { + TRAIT_NAME, TRAIT_NAME + "$class", + }; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isTraitMethod( + TRAIT_NAME, + "urlEncodedFormDataUnmarshaller", + namedOneOf("scala.collection.Seq", "scala.collection.immutable.Seq")) + .and(returns(named("akka.http.scaladsl.unmarshalling.Unmarshaller"))), + PredefinedFromEntityUnmarshallersInstrumentation.class.getName() + + "$UrlEncodedUnmarshallerWrappingAdvice"); + transformation.applyAdvice( + isTraitMethod(TRAIT_NAME, "stringUnmarshaller") + .and(returns(named("akka.http.scaladsl.unmarshalling.Unmarshaller"))), + PredefinedFromEntityUnmarshallersInstrumentation.class.getName() + + "$StringUnmarshallerWrappingAdvice"); + } + + static class UrlEncodedUnmarshallerWrappingAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after(@Advice.Return(readOnly = false) Unmarshaller unmarshaller) { + unmarshaller = UnmarshallerHelpers.transformUrlEncodedUnmarshaller(unmarshaller); + } + } + + static class StringUnmarshallerWrappingAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after( + @Advice.Return(readOnly = false) + Unmarshaller unmarshaller) { + unmarshaller = UnmarshallerHelpers.transformStringUnmarshaller(unmarshaller); + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ScalaListCollector.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ScalaListCollector.java new file mode 100644 index 00000000000..6410d125c5d --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ScalaListCollector.java @@ -0,0 +1,108 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static java.lang.invoke.MethodHandles.lookup; +import static java.lang.invoke.MethodType.methodType; + +import java.lang.invoke.MethodHandle; +import java.util.Collections; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.BinaryOperator; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collector; +import scala.collection.immutable.List; +import scala.collection.mutable.ListBuffer; + +public class ScalaListCollector implements Collector, List> { + + private static final Collector INSTANCE_TO_LIST; + private static final MethodHandle PLUS_EQ; + private static final MethodHandle PLUS_PLUS_EQ; + + static { + ClassLoader classLoader = ScalaListCollector.class.getClassLoader(); + if (classLoader == null) { + classLoader = ClassLoader.getSystemClassLoader(); + } + + MethodHandle plusEq; + MethodHandle plusPlusEq; + try { + plusEq = + lookup() + .findVirtual( + ListBuffer.class, "$plus$eq", methodType(ListBuffer.class, Object.class)); + Class traversableOnceCls = classLoader.loadClass("scala.collection.TraversableOnce"); + plusPlusEq = + lookup() + .findVirtual( + ListBuffer.class, + "$plus$plus$eq", + methodType(ListBuffer.class, traversableOnceCls)); + } catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException e) { + try { + plusEq = + lookup() + .findVirtual( + 
+                    ListBuffer.class, "addOne", methodType(ListBuffer.class, Object.class));
+        Class<?> iterableOnceCls = classLoader.loadClass("scala.collection.IterableOnce");
+        plusPlusEq =
+            lookup()
+                .findVirtual(
+                    ListBuffer.class, "addAll", methodType(ListBuffer.class, iterableOnceCls));
+      } catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException ex) {
+        throw new RuntimeException(ex);
+      }
+    }
+
+    PLUS_EQ = plusEq;
+    PLUS_PLUS_EQ = plusPlusEq;
+    INSTANCE_TO_LIST = new ScalaListCollector();
+  }
+
+  public static <T> Collector<T, ?, List<T>> toScalaList() {
+    return INSTANCE_TO_LIST;
+  }
+
+  private static <T> ListBuffer<T> addOne(ListBuffer<T> list, T object) {
+    try {
+      return (ListBuffer<T>) PLUS_EQ.invoke(list, object);
+    } catch (Throwable e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private static <T> ListBuffer<T> addAll(ListBuffer<T> list, ListBuffer<T> otherList) {
+    try {
+      return (ListBuffer<T>) PLUS_PLUS_EQ.invoke(list, otherList);
+    } catch (Throwable e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public Supplier<ListBuffer<T>> supplier() {
+    return scala.collection.mutable.ListBuffer::new;
+  }
+
+  @Override
+  public BiConsumer<ListBuffer<T>, T> accumulator() {
+    return ScalaListCollector::addOne;
+  }
+
+  @Override
+  public BinaryOperator<ListBuffer<T>> combiner() {
+    return ScalaListCollector::addAll;
+  }
+
+  @Override
+  public Function<ListBuffer<T>, List<T>> finisher() {
+    return scala.collection.mutable.ListBuffer::toList;
+  }
+
+  @Override
+  public Set<Characteristics> characteristics() {
+    return Collections.emptySet();
+  }
+}
diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ScalaListCollectorMuzzleReferences.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ScalaListCollectorMuzzleReferences.java
new file mode 100644
index 00000000000..08f73d2c7fa
--- /dev/null
+++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/ScalaListCollectorMuzzleReferences.java
@@ -0,0 +1,38 @@
+package datadog.trace.instrumentation.akkahttp.appsec;
+
+import datadog.trace.agent.tooling.muzzle.Reference;
+
+public interface ScalaListCollectorMuzzleReferences {
+  Reference SCALA_LIST_COLLECTOR =
+      new Reference.Builder("scala.collection.mutable.ListBuffer")
+          .withMethod(
+              new String[0],
+              Reference.EXPECTS_NON_STATIC | Reference.EXPECTS_PUBLIC,
+              "$plus$eq",
+              "Lscala/collection/mutable/ListBuffer;",
+              "Ljava/lang/Object;")
+          .withMethod(
+              new String[0],
+              Reference.EXPECTS_NON_STATIC | Reference.EXPECTS_PUBLIC,
+              "$plus$plus$eq",
+              "Lscala/collection/mutable/ListBuffer;",
+              "Lscala/collection/TraversableOnce;")
+          .or()
+          .withMethod(
+              new String[0],
+              Reference.EXPECTS_NON_STATIC | Reference.EXPECTS_PUBLIC,
+              "addOne",
+              "Lscala/collection/mutable/ListBuffer;",
+              "Ljava/lang/Object;")
+          .withMethod(
+              new String[0],
+              Reference.EXPECTS_NON_STATIC | Reference.EXPECTS_PUBLIC,
+              "addAll",
+              "Lscala/collection/mutable/ListBuffer;",
+              "Lscala/collection/IterableOnce;")
+          .build();
+
+  static Reference[] additionalMuzzleReferences() {
+    return new Reference[] {SCALA_LIST_COLLECTOR};
+  }
+}
diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/SprayUnmarshallerInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/SprayUnmarshallerInstrumentation.java
new file mode 100644
index 00000000000..2e5aa79515f
--- /dev/null
+++
b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/SprayUnmarshallerInstrumentation.java @@ -0,0 +1,73 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static datadog.trace.agent.tooling.bytebuddy.matcher.ScalaTraitMatchers.isTraitMethod; +import static net.bytebuddy.matcher.ElementMatchers.returns; + +import akka.http.scaladsl.unmarshalling.Unmarshaller; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import net.bytebuddy.asm.Advice; + +// TODO: move to separate module and have better support +@AutoService(Instrumenter.class) +public class SprayUnmarshallerInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForKnownTypes { + + private static final String TRAIT_NAME = + "akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport"; + + public SprayUnmarshallerInstrumentation() { + super("akka-http"); + } + + @Override + public String[] knownMatchingTypes() { + return new String[] { + TRAIT_NAME, TRAIT_NAME + "$class", + }; + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".UnmarshallerHelpers", + packageName + ".UnmarshallerHelpers$UnmarkStrictFormOngoingOnUnsupportedException", + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + "datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isTraitMethod(TRAIT_NAME, "sprayJsonUnmarshaller", "spray.json.RootJsonReader") + .and(returns(named("akka.http.scaladsl.unmarshalling.Unmarshaller"))) + .or( + isTraitMethod( + TRAIT_NAME, "sprayJsonByteStringUnmarshaller", "spray.json.RootJsonReader") + .and(returns(named("akka.http.scaladsl.unmarshalling.Unmarshaller")))), + SprayUnmarshallerInstrumentation.class.getName() + "$ArbitraryTypeAdvice"); + // support is basic: + // * Source[T, NotUsed] is not intercepted + // * neither is the conversion into JsValue. 
It would need to wrap the JsValue + // to intercept calls to the methods in play.api.libs.json.JsReadable + } + + static class ArbitraryTypeAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after(@Advice.Return(readOnly = false) Unmarshaller ret) { + ret = UnmarshallerHelpers.transformArbitrarySprayUnmarshaller(ret); + } + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/StrictFormCompanionInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/StrictFormCompanionInstrumentation.java new file mode 100644 index 00000000000..c7842cf8bb2 --- /dev/null +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/StrictFormCompanionInstrumentation.java @@ -0,0 +1,70 @@ +package datadog.trace.instrumentation.akkahttp.appsec; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.isStatic; +import static net.bytebuddy.matcher.ElementMatchers.not; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import akka.http.scaladsl.common.StrictForm; +import akka.http.scaladsl.model.HttpEntity; +import akka.http.scaladsl.unmarshalling.Unmarshaller; +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import net.bytebuddy.asm.Advice; + +/** @see akka.http.scaladsl.common.StrictForm$#unmarshaller(Unmarshaller, Unmarshaller) */ +@AutoService(Instrumenter.class) +public class StrictFormCompanionInstrumentation extends Instrumenter.AppSec + implements Instrumenter.ForSingleType { + public StrictFormCompanionInstrumentation() { + super("akka-http"); + } + + @Override + public String instrumentedType() { + return "akka.http.scaladsl.common.StrictForm$"; + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".UnmarshallerHelpers", + packageName + ".UnmarshallerHelpers$UnmarkStrictFormOngoingOnUnsupportedException", + packageName + ".AkkaBlockResponseFunction", + packageName + ".BlockingResponseHelper", + packageName + ".ScalaListCollector", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerDecorator", + "datadog.trace.instrumentation.akkahttp.AkkaHttpServerHeaders", + "datadog.trace.instrumentation.akkahttp.UriAdapter", + }; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return ScalaListCollectorMuzzleReferences.additionalMuzzleReferences(); + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isMethod() + .and(not(isStatic())) + .and(named("unmarshaller")) + .and(returns(named("akka.http.scaladsl.unmarshalling.Unmarshaller"))) + .and(takesArguments(2)) + .and(takesArgument(0, named("akka.http.scaladsl.unmarshalling.Unmarshaller"))) + .and(takesArgument(1, named("akka.http.scaladsl.unmarshalling.Unmarshaller"))), + StrictFormCompanionInstrumentation.class.getName() + "$UnmarshallerAdvice"); + } + + static class UnmarshallerAdvice { + @Advice.OnMethodExit(suppress = Throwable.class) + static void after(@Advice.Return(readOnly = false) Unmarshaller ret) { + ret = 
UnmarshallerHelpers.transformStrictFormUnmarshaller(ret);
+    }
+  }
+}
diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/UnmarshallerHelpers.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/UnmarshallerHelpers.java
new file mode 100644
index 00000000000..baa82ec95ca
--- /dev/null
+++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/appsec/UnmarshallerHelpers.java
@@ -0,0 +1,487 @@
+package datadog.trace.instrumentation.akkahttp.appsec;
+
+import static datadog.trace.api.gateway.Events.EVENTS;
+import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan;
+
+import akka.http.javadsl.model.ContentType;
+import akka.http.javadsl.model.MediaType;
+import akka.http.javadsl.model.MediaTypes;
+import akka.http.scaladsl.common.StrictForm;
+import akka.http.scaladsl.model.FormData;
+import akka.http.scaladsl.model.HttpEntity;
+import akka.http.scaladsl.unmarshalling.Unmarshaller;
+import akka.http.scaladsl.unmarshalling.Unmarshaller$;
+import akka.japi.JavaPartialFunction;
+import akka.stream.Materializer;
+import datadog.appsec.api.blocking.BlockingException;
+import datadog.trace.api.gateway.BlockResponseFunction;
+import datadog.trace.api.gateway.CallbackProvider;
+import datadog.trace.api.gateway.Flow;
+import datadog.trace.api.gateway.RequestContext;
+import datadog.trace.api.gateway.RequestContextSlot;
+import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
+import datadog.trace.bootstrap.instrumentation.api.AgentTracer;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.WeakHashMap;
+import java.util.function.BiFunction;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.Function1;
+import scala.PartialFunction;
+import scala.Tuple2;
+import scala.collection.Iterable;
+import scala.collection.Iterator;
+import scala.compat.java8.JFunction1;
+import scala.compat.java8.JFunction2;
+import scala.concurrent.ExecutionContext;
+import scala.concurrent.Future;
+
+public class UnmarshallerHelpers {
+
+  public static final int MAX_CONVERSION_DEPTH = 10;
+  private static final Logger log = LoggerFactory.getLogger(UnmarshallerHelpers.class);
+
+  private static final MediaType APPLICATION_X_WWW_FORM_URLENCODED;
+
+  static {
+    MediaType t = null;
+    try {
+      // subtype of MediaType changes between 10.0 and 10.1
+      Field f = MediaTypes.class.getField("APPLICATION_X_WWW_FORM_URLENCODED");
+      t = (MediaType) f.get(null);
+    } catch (NoSuchFieldException | IllegalAccessException e) {
+    }
+    APPLICATION_X_WWW_FORM_URLENCODED = t;
+  }
+
+  private UnmarshallerHelpers() {}
+
+  public static Unmarshaller<HttpEntity, FormData>
+      transformUrlEncodedUnmarshaller(
+          Unmarshaller<HttpEntity, FormData> original) {
+    JFunction1<FormData, FormData> mapf =
+        formData -> {
+          try {
+            handleFormData(formData);
+          } catch (Exception e) {
+            handleException(e, "Error in transformUrlEncodedUnmarshaller");
+          }
+
+          return formData;
+        };
+
+    return original.map(mapf);
+  }
+
+  private static void handleFormData(FormData formData) {
+    AgentSpan span = activeSpan();
+    RequestContext reqCtx;
+    if (span == null
+        || (reqCtx = span.getRequestContext()) == null
+        || reqCtx.getData(RequestContextSlot.APPSEC) == null
+        || isStrictFormOngoing(span)) {
+      return;
+    }
+
+    CallbackProvider cbp = AgentTracer.get().getCallbackProvider(RequestContextSlot.APPSEC);
+    BiFunction<RequestContext, Object, Flow<Void>> callback =
cbp.getCallback(EVENTS.requestBodyProcessed()); + if (callback == null) { + return; + } + + Iterator> fieldsIter = formData.fields().iterator(); + Map> conv = new HashMap<>(); + while (fieldsIter.hasNext()) { + Tuple2 pair = fieldsIter.next(); + + String key = pair._1; + List values = conv.get(key); + if (values == null) { + values = new ArrayList<>(); + conv.put(key, values); + } + values.add(pair._2); + } + + if (conv.isEmpty()) { + return; + } + + // callback execution + executeCallback(reqCtx, callback, conv, "urlEncodedFormDataUnmarshaller"); + } + + private static void executeCallback( + RequestContext reqCtx, + BiFunction> callback, + Object conv, + String details) { + Flow flow = callback.apply(reqCtx, conv); + Flow.Action action = flow.getAction(); + if (action instanceof Flow.Action.RequestBlockingAction) { + Flow.Action.RequestBlockingAction rba = (Flow.Action.RequestBlockingAction) action; + BlockResponseFunction blockResponseFunction = reqCtx.getBlockResponseFunction(); + if (blockResponseFunction != null) { + boolean success = + blockResponseFunction.tryCommitBlockingResponse( + reqCtx.getTraceSegment(), + rba.getStatusCode(), + rba.getBlockingContentType(), + rba.getExtraHeaders()); + if (success) { + if (blockResponseFunction instanceof AkkaBlockResponseFunction) { + AkkaBlockResponseFunction abrf = (AkkaBlockResponseFunction) blockResponseFunction; + abrf.setUnmarshallBlock(true); + } + throw new BlockingException("Blocked request (for " + details + ")"); + } + } + } + } + + public static Unmarshaller transformMultipartFormDataUnmarshaller(Unmarshaller original) { + JFunction1< + akka.http.scaladsl.model.Multipart.FormData, + akka.http.scaladsl.model.Multipart.FormData> + mapf = + t -> { + if (!(t instanceof akka.http.scaladsl.model.Multipart$FormData$Strict)) { + // data not loaded yet... 
+ // it's not practical to wrap the object + // rely on instrumentation on toStrict + return t; + } + + try { + handleMultipartStrictFormData( + (akka.http.scaladsl.model.Multipart$FormData$Strict) t); + } catch (Exception e) { + handleException(e, "Error in handleMultipartStrictFormData"); + } + + return t; + }; + + return original.map(mapf); + } + + public static scala.concurrent.Future + transformMultiPartFormDataToStrictFuture( + scala.concurrent.Future future, + Materializer materializer) { + JFunction1< + akka.http.scaladsl.model.Multipart$FormData$Strict, + akka.http.scaladsl.model.Multipart$FormData$Strict> + mapf = + t -> { + try { + AgentSpan span = activeSpan(); + if (span != null && !isStrictFormOngoing(span)) { + handleMultipartStrictFormData(t); + } + } catch (Exception e) { + handleException(e, "Error in transformMultiPartFormDataToStrictFuture"); + } + return t; + }; + return future.map(mapf, materializer.executionContext()); + } + + private static void handleMultipartStrictFormData( + akka.http.scaladsl.model.Multipart$FormData$Strict st) { + AgentSpan span = activeSpan(); + RequestContext reqCtx; + if (span == null + || (reqCtx = span.getRequestContext()) == null + || reqCtx.getData(RequestContextSlot.APPSEC) == null) { + return; + } + + CallbackProvider cbp = AgentTracer.get().getCallbackProvider(RequestContextSlot.APPSEC); + BiFunction> callback = + cbp.getCallback(EVENTS.requestBodyProcessed()); + if (callback == null) { + return; + } + + // conversion to map string -> list of string + java.lang.Iterable strictParts = + st.getStrictParts(); + Map> conv = new HashMap<>(); + for (akka.http.javadsl.model.Multipart.FormData.BodyPart.Strict part : strictParts) { + akka.http.javadsl.model.HttpEntity.Strict entity = part.getEntity(); + if (!(entity instanceof HttpEntity.Strict)) { + continue; + } + + HttpEntity.Strict sentity = (HttpEntity.Strict) entity; + + String name = part.getName(); + List curStrings = conv.get(name); + if (curStrings == null) { + curStrings = new ArrayList<>(); + conv.put(name, curStrings); + } + + String s = + sentity + .getData() + .decodeString( + Unmarshaller$.MODULE$.bestUnmarshallingCharsetFor(sentity).nioCharset()); + curStrings.add(s); + } + + // callback execution + executeCallback(reqCtx, callback, conv, "multipartFormDataUnmarshaller"); + } + + public static Unmarshaller transformStringUnmarshaller( + Unmarshaller original) { + Unmarshaller.EnhancedUnmarshaller enhancedOriginal = + new Unmarshaller.EnhancedUnmarshaller<>(original); + JFunction2 f2 = + (entity, str) -> { + try { + AgentSpan agentSpan = activeSpan(); + if (agentSpan == null || isStrictFormOngoing(agentSpan)) { + return str; + } + + ContentType contentType = entity.getContentType(); + MediaType mediaType = contentType.mediaType(); + if (mediaType != MediaTypes.APPLICATION_JSON + && mediaType != MediaTypes.MULTIPART_FORM_DATA + && mediaType != APPLICATION_X_WWW_FORM_URLENCODED) { + handleArbitraryPostData(str, "HttpEntity -> String unmarshaller"); + } + } catch (Exception e) { + handleException(e, "Error in transformStringUnmarshaller"); + } + + return str; + }; + + return enhancedOriginal.mapWithInput(f2); + } + + public static akka.http.javadsl.unmarshalling.Unmarshaller transformJacksonUnmarshaller( + akka.http.javadsl.unmarshalling.Unmarshaller original) { + return original.thenApply( + ret -> { + try { + handleArbitraryPostData(ret, "jackson unmarshaller"); + } catch (Exception e) { + handleException(e, "Error in transformJacksonUnmarshaller"); + } + return ret; + }); + } + 
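+  // Illustrative sketch, not from the original patch: each transformXxxUnmarshaller in this
+  // class follows the same wrapping pattern -- the original Unmarshaller is kept and a mapping
+  // function only observes the value it produced before handing it back unchanged, e.g.
+  // (generic types assumed for illustration):
+  //
+  //   JFunction1<FormData, FormData> inspect =
+  //       formData -> {
+  //         handleFormData(formData); // report the parsed body to the AppSec callback
+  //         return formData;          // the application still receives the original value
+  //       };
+  //   return original.map(inspect);
+  //
+  // Wrapping instead of replacing keeps akka-http's own unmarshalling behaviour intact.
+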
+ public static Unmarshaller transformArbitrarySprayUnmarshaller(Unmarshaller original) { + JFunction1 f = + ret -> { + Object conv = tryConvertingScalaContainers(ret, MAX_CONVERSION_DEPTH); + try { + handleArbitraryPostData(conv, "spray unmarshaller"); + } catch (Exception e) { + handleException(e, "Error in transformArbitrarySprayUnmarshaller"); + } + return ret; + }; + return original.map(f); + } + + private static final WeakHashMap STRICT_FORM_SERIALIZATION_ONGOING = + new WeakHashMap<>(); + + // when unmarshalling parts of multipart requests, some other unmarshallers that + // we also instrument, like the string unmarshaller, can run. Those runs happen + // in a subset of the data, so we are not interested in them: instead we want + // to submit all the data at the same time, after having finished fully + // unmarshalling the StrictForm. We suppress the sub-runs of unmarshallers by + // noticing when the StrictForm unmarshaller starts running + private static void markStrictFormOngoing(AgentSpan agentSpan) { + synchronized (STRICT_FORM_SERIALIZATION_ONGOING) { + STRICT_FORM_SERIALIZATION_ONGOING.put(agentSpan.getRequestContext(), Boolean.TRUE); + } + } + + private static void unmarkStrictFormOngoing(AgentSpan agentSpan) { + synchronized (STRICT_FORM_SERIALIZATION_ONGOING) { + STRICT_FORM_SERIALIZATION_ONGOING.remove(agentSpan.getRequestContext()); + } + } + + private static boolean isStrictFormOngoing(AgentSpan agentSpan) { + synchronized (STRICT_FORM_SERIALIZATION_ONGOING) { + return STRICT_FORM_SERIALIZATION_ONGOING.getOrDefault( + agentSpan.getRequestContext(), Boolean.FALSE); + } + } + + private static JFunction1 STRICT_FORM_DATA_POST_TRANSF = + sf -> { + try { + handleStrictFormData(sf); + } catch (Exception e) { + handleException(e, "Error in transformStrictFromUnmarshaller"); + } + // we do not remove the span from STRICT_FORM_SERIALIZATION_ONGOING, + // as the string unmarshaller can still run afterwards. 
This way, the + // advice will still be skipped + return sf; + }; + + public static class UnmarkStrictFormOngoingOnUnsupportedException + extends JavaPartialFunction { + public static final PartialFunction INSTANCE = + new UnmarkStrictFormOngoingOnUnsupportedException(); + + @Override + public StrictForm apply(Throwable x, boolean isCheck) throws Exception { + if (!(x + instanceof + akka.http.scaladsl.unmarshalling.Unmarshaller.UnsupportedContentTypeException)) { + throw noMatch(); + } + if (isCheck) { + return null; + } + + AgentSpan agentSpan = activeSpan(); + if (agentSpan != null) { + unmarkStrictFormOngoing(agentSpan); + } + throw (Exception) x; + } + } + + public static Unmarshaller transformStrictFormUnmarshaller( + Unmarshaller original) { + JFunction1>>> + wrappedBeforeF = + ec -> { + JFunction1>> g = + mat -> { + JFunction1> h = + entity -> { + AgentSpan agentSpan = activeSpan(); + if (agentSpan != null) { + markStrictFormOngoing(agentSpan); + } + + Future resFut = original.apply(entity, ec, mat); + return resFut + .recover(UnmarkStrictFormOngoingOnUnsupportedException.INSTANCE, ec) + .map(STRICT_FORM_DATA_POST_TRANSF, ec); + }; + return h; + }; + return g; + }; + Unmarshaller wrapped = + Unmarshaller$.MODULE$.withMaterializer(wrappedBeforeF); + + return wrapped; + } + + private static void handleStrictFormData(StrictForm sf) { + Iterator> iterator = sf.fields().iterator(); + Map> conv = new HashMap<>(); + while (iterator.hasNext()) { + Tuple2 next = iterator.next(); + String fieldName = next._1(); + StrictForm.Field field = next._2(); + + List strings = conv.get(fieldName); + if (strings == null) { + strings = new ArrayList<>(); + conv.put(fieldName, strings); + } + + Object strictFieldValue; + try { + Field f = field.getClass().getDeclaredField("value"); + f.setAccessible(true); + strictFieldValue = f.get(field); + } catch (NoSuchFieldException | IllegalAccessException e) { + continue; + } + + if (strictFieldValue instanceof String) { + strings.add((String) strictFieldValue); + } else if (strictFieldValue + instanceof akka.http.scaladsl.model.Multipart$FormData$BodyPart$Strict) { + HttpEntity.Strict sentity = + ((akka.http.scaladsl.model.Multipart$FormData$BodyPart$Strict) strictFieldValue) + .entity(); + String s = + sentity + .getData() + .decodeString( + Unmarshaller$.MODULE$.bestUnmarshallingCharsetFor(sentity).nioCharset()); + strings.add(s); + } + } + + handleArbitraryPostData(conv, "HttpEntity -> StrictForm unmarshaller"); + } + + private static Object tryConvertingScalaContainers(Object obj, int depth) { + if (depth == 0) { + return obj; + } + if (obj instanceof scala.collection.Map) { + scala.collection.Map map = (scala.collection.Map) obj; + Map ret = new HashMap<>(); + Iterator iterator = map.iterator(); + while (iterator.hasNext()) { + Tuple2 next = iterator.next(); + ret.put(next._1(), tryConvertingScalaContainers(next._2(), depth - 1)); + } + return ret; + } else if (obj instanceof scala.collection.Iterable) { + List ret = new ArrayList<>(); + Iterator iterator = ((Iterable) obj).iterator(); + while (iterator.hasNext()) { + Object next = iterator.next(); + ret.add(tryConvertingScalaContainers(next, depth - 1)); + } + return ret; + } + return obj; + } + + private static void handleArbitraryPostData(Object o, String source) { + AgentSpan span = activeSpan(); + RequestContext reqCtx; + if (span == null + || (reqCtx = span.getRequestContext()) == null + || reqCtx.getData(RequestContextSlot.APPSEC) == null) { + return; + } + + CallbackProvider cbp = 
AgentTracer.get().getCallbackProvider(RequestContextSlot.APPSEC); + BiFunction> callback = + cbp.getCallback(EVENTS.requestBodyProcessed()); + if (callback == null) { + return; + } + + // callback execution + executeCallback(reqCtx, callback, o, source); + } + + private static void handleException(Exception e, String logMessage) { + if (e instanceof BlockingException) { + throw (BlockingException) e; + } + + log.warn(logMessage, e); + } +} diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieDirectivesInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieDirectivesInstrumentation.java index 623b148fd99..629dbc8f40a 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieDirectivesInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieDirectivesInstrumentation.java @@ -55,7 +55,7 @@ public void adviceTransformations(AdviceTransformation transformation) { static class TaintCookieAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_COOKIE_VALUE_STRING) + @Source(SourceTypes.REQUEST_COOKIE_VALUE) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintCookieFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); } @@ -63,7 +63,7 @@ static void after(@Advice.Return(readOnly = false) Directive directive) { static class TaintOptionalCookieAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_COOKIE_VALUE_STRING) + @Source(SourceTypes.REQUEST_COOKIE_VALUE) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintOptionalCookieFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieHeaderInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieHeaderInstrumentation.java index 5e1119848c1..1c8b35e4bb3 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieHeaderInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/CookieHeaderInstrumentation.java @@ -12,13 +12,11 @@ import akka.http.scaladsl.model.headers.HttpCookiePair; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Source; import datadog.trace.api.iast.SourceTypes; import datadog.trace.api.iast.propagation.PropagationModule; -import datadog.trace.api.iast.source.WebModule; -import java.util.ArrayList; -import java.util.List; import net.bytebuddy.asm.Advice; import scala.collection.Iterator; import scala.collection.immutable.Seq; @@ -53,26 +51,25 @@ public void adviceTransformations(AdviceTransformation transformation) { static class TaintAllCookiesAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_COOKIE_VALUE_STRING) + @Source(SourceTypes.REQUEST_COOKIE_VALUE) static void after( @Advice.This HttpHeader cookie, @Advice.Return Seq cookiePairs) { - WebModule mod = 
InstrumentationBridge.WEB; PropagationModule prop = InstrumentationBridge.PROPAGATION; - if (mod == null || prop == null || cookiePairs == null || cookiePairs.isEmpty()) { + if (prop == null || cookiePairs == null || cookiePairs.isEmpty()) { return; } if (!prop.isTainted(cookie)) { return; } + final IastContext ctx = IastContext.Provider.get(); Iterator iterator = cookiePairs.iterator(); - List cookieNames = new ArrayList<>(); while (iterator.hasNext()) { HttpCookiePair pair = iterator.next(); - cookieNames.add(pair.name()); - prop.taint(SourceTypes.REQUEST_COOKIE_VALUE, pair.name(), pair.value()); + final String name = pair.name(), value = pair.value(); + prop.taint(ctx, name, SourceTypes.REQUEST_COOKIE_NAME, name); + prop.taint(ctx, value, SourceTypes.REQUEST_COOKIE_VALUE, name); } - mod.onCookieNames(cookieNames); } } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ExtractDirectivesInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ExtractDirectivesInstrumentation.java index ff9d10c876f..4c04cf6bd13 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ExtractDirectivesInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ExtractDirectivesInstrumentation.java @@ -72,7 +72,7 @@ private void instrumentDirective( static class TaintUriDirectiveAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_QUERY_STRING) + @Source(SourceTypes.REQUEST_QUERY) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintUriFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); } @@ -80,7 +80,7 @@ static void after(@Advice.Return(readOnly = false) Directive directive) { static class TaintRequestDirectiveAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_BODY_STRING) + @Source(SourceTypes.REQUEST_BODY) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintRequestFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); } @@ -88,7 +88,7 @@ static void after(@Advice.Return(readOnly = false) Directive directive) { static class TaintRequestContextDirectiveAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_BODY_STRING) + @Source(SourceTypes.REQUEST_BODY) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintRequestContextFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/FormFieldDirectivesInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/FormFieldDirectivesInstrumentation.java index 6dc51c52502..33b5f753634 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/FormFieldDirectivesInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/FormFieldDirectivesInstrumentation.java @@ -101,7 +101,7 @@ private void transformDirective( static class TaintSingleFormFieldDirectiveOldScalaAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - 
@Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after( @Advice.Return(readOnly = false, typing = Assigner.Typing.DYNAMIC) Directive retval, @Advice.Argument(1) FormFieldDirectives.FieldMagnet fmag) { @@ -116,7 +116,7 @@ static void after( static class TaintSingleFormFieldDirectiveNewScalaAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after( @Advice.Return(readOnly = false, typing = Assigner.Typing.DYNAMIC) Directive retval, @Advice.Argument(0) FormFieldDirectives.FieldMagnet fmag) { diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HeaderNameCallSite.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HeaderNameCallSite.java index f673a2b8349..f7f330a14b5 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HeaderNameCallSite.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HeaderNameCallSite.java @@ -6,16 +6,14 @@ import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Source; import datadog.trace.api.iast.SourceTypes; -import datadog.trace.api.iast.Taintable; -import datadog.trace.api.iast.source.WebModule; -import java.util.Collections; +import datadog.trace.api.iast.propagation.PropagationModule; /** * Detects when a header name is directly called from user code. This uses call site instrumentation * because there are many calls to {@link HttpHeader#name()} inside akka-http code that we don't * care about. 
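 * <p>For example, when application code calls {@code name()} on a header object that is already
 * tainted, the advice below reports the returned string as a request header name source.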
*/ -@Source(value = SourceTypes.REQUEST_HEADER_NAME_STRING) +@Source(value = SourceTypes.REQUEST_HEADER_NAME) @CallSite(spi = IastCallSites.class) public class HeaderNameCallSite { @@ -23,18 +21,15 @@ public class HeaderNameCallSite { @CallSite.After( "java.lang.String akka.http.scaladsl.model.HttpHeader.name()") // subtype of the first public static String after(@CallSite.This HttpHeader header, @CallSite.Return String result) { - WebModule module = InstrumentationBridge.WEB; + PropagationModule module = InstrumentationBridge.PROPAGATION; if (module == null) { return result; } try { - if (header instanceof Taintable && ((Taintable) header).$DD$isTainted()) { - module.onHeaderNames(Collections.singletonList(result)); - } - return result; + module.taintIfTainted(result, header, SourceTypes.REQUEST_HEADER_NAME, result); } catch (final Throwable e) { module.onUnexpectedException("onHeaderNames threw", e); - return result; } + return result; } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpHeaderSubclassesInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpHeaderSubclassesInstrumentation.java index 8e71ae0d0f8..57cd04ce127 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpHeaderSubclassesInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpHeaderSubclassesInstrumentation.java @@ -14,7 +14,6 @@ import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Propagation; -import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; import net.bytebuddy.asm.Advice; import net.bytebuddy.description.type.TypeDescription; @@ -59,11 +58,11 @@ static class HttpHeaderSubclassesAdvice { static void onExit(@Advice.This HttpHeader h, @Advice.Return String retVal) { PropagationModule propagation = InstrumentationBridge.PROPAGATION; - if (propagation == null || !(h instanceof Taintable)) { + if (propagation == null) { return; } - propagation.taintIfInputIsTainted(retVal, h); + propagation.taintIfTainted(retVal, h); } } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpRequestInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpRequestInstrumentation.java index 775c5d0ab4a..a0c5f432e85 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpRequestInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/HttpRequestInstrumentation.java @@ -13,11 +13,11 @@ import akka.http.scaladsl.model.HttpRequest; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Propagation; import datadog.trace.api.iast.Source; import datadog.trace.api.iast.SourceTypes; -import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import net.bytebuddy.asm.Advice; @@ -59,7 +59,7 @@ public 
void adviceTransformations(AdviceTransformation transformation) { @SuppressFBWarnings("BC_IMPOSSIBLE_INSTANCEOF") static class RequestHeadersAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_HEADER_VALUE_STRING) + @Source(SourceTypes.REQUEST_HEADER_VALUE) static void onExit( @Advice.This HttpRequest thiz, @Advice.Return(readOnly = false) Seq headers) { PropagationModule propagation = InstrumentationBridge.PROPAGATION; @@ -67,27 +67,20 @@ static void onExit( return; } - if (!((Object) thiz instanceof Taintable)) { - return; - } - if (!((Taintable) (Object) thiz).$DD$isTainted()) { + if (!propagation.isTainted(thiz)) { return; } + final IastContext ctx = IastContext.Provider.get(); Iterator iterator = headers.iterator(); while (iterator.hasNext()) { HttpHeader h = iterator.next(); - if (!(h instanceof Taintable)) { - continue; - } - - Taintable t = (Taintable) h; - if (t.$DD$isTainted()) { + if (propagation.isTainted(h)) { continue; } // unfortunately, the call to h.value() is instrumented, but // because the call to taint() only happens after, the call is a noop - propagation.taint(SourceTypes.REQUEST_HEADER_VALUE, h.name(), h.value(), t); + propagation.taint(ctx, h, SourceTypes.REQUEST_HEADER_VALUE, h.name(), h.value()); } } } @@ -103,13 +96,11 @@ static void onExit( return; } - if (entity instanceof Taintable) { - if (((Taintable) entity).$DD$isTainted()) { - return; - } + if (propagation.isTainted(entity)) { + return; } - propagation.taintIfInputIsTainted(entity, thiz); + propagation.taintIfTainted(entity, thiz); } } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/MarshallingDirectivesInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/MarshallingDirectivesInstrumentation.java index b1dc0d68ead..a8a4b557174 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/MarshallingDirectivesInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/MarshallingDirectivesInstrumentation.java @@ -76,7 +76,7 @@ public void adviceTransformations(AdviceTransformation transformation) { static class TaintUnmarshallerInputOldScalaAdvice { @Advice.OnMethodEnter(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_BODY_STRING) + @Source(SourceTypes.REQUEST_BODY) static void before(@Advice.Argument(readOnly = false, value = 1) Unmarshaller unmarshaller) { PropagationModule mod = InstrumentationBridge.PROPAGATION; if (mod != null) { @@ -87,7 +87,7 @@ static void before(@Advice.Argument(readOnly = false, value = 1) Unmarshaller un static class TaintUnmarshallerInputNewScalaAdvice { @Advice.OnMethodEnter(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_BODY_STRING) + @Source(SourceTypes.REQUEST_BODY) static void before(@Advice.Argument(readOnly = false, value = 0) Unmarshaller unmarshaller) { PropagationModule mod = InstrumentationBridge.PROPAGATION; if (mod != null) { diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ParameterDirectivesInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ParameterDirectivesInstrumentation.java index f25c72ffa75..6ddc32aa877 100644 --- 
a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ParameterDirectivesInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/ParameterDirectivesInstrumentation.java @@ -100,7 +100,7 @@ private void transformDirective( static class TaintMultiMapDirectiveAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintMultiMapFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); } @@ -108,7 +108,7 @@ static void after(@Advice.Return(readOnly = false) Directive directive) { static class TaintMapDirectiveAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintMapFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); } @@ -116,7 +116,7 @@ static void after(@Advice.Return(readOnly = false) Directive directive) { static class TaintSeqDirectiveAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after(@Advice.Return(readOnly = false) Directive directive) { directive = directive.tmap(TaintSeqFunction.INSTANCE, Tupler$.MODULE$.forTuple(null)); } @@ -124,7 +124,7 @@ static void after(@Advice.Return(readOnly = false) Directive directive) { static class TaintSingleParameterDirectiveOldScalaAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after( @Advice.Return(readOnly = false) Object retval, @Advice.Argument(1) ParameterDirectives.ParamMagnet pmag) { @@ -144,7 +144,7 @@ static void after( static class TaintSingleParameterDirectiveNewScalaAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after( @Advice.Return(readOnly = false) Object retval, @Advice.Argument(0) ParameterDirectives.ParamMagnet pmag) { diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/PathMatcherInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/PathMatcherInstrumentation.java index 46371a610d4..80c3b2687f0 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/PathMatcherInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/PathMatcherInstrumentation.java @@ -11,6 +11,7 @@ import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.api.gateway.RequestContext; import datadog.trace.api.gateway.RequestContextSlot; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Source; import datadog.trace.api.iast.SourceTypes; @@ -47,7 +48,7 @@ public void adviceTransformations(AdviceTransformation transformation) { @RequiresRequestContext(RequestContextSlot.IAST) static class PathMatcherAdvice { 
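    // Illustrative note (assumed example, not part of the original patch): for a Scala routing
    // DSL route such as path("users" / Segment) { id => ... }, the String captured by the
    // matcher is what reaches this advice through `extractions`, and it is tainted below as a
    // REQUEST_PATH_PARAMETER source.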
@Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PATH_PARAMETER_STRING) + @Source(SourceTypes.REQUEST_PATH_PARAMETER) static void onExit( @Advice.Argument(1) Object extractions, @ActiveRequestContext RequestContext reqCtx) { if (!(extractions instanceof scala.Tuple1)) { @@ -68,11 +69,8 @@ static void onExit( } if (value instanceof String) { - module.taint( - reqCtx.getData(RequestContextSlot.IAST), - SourceTypes.REQUEST_PATH_PARAMETER, - null, - (String) value); + final IastContext ctx = reqCtx.getData(RequestContextSlot.IAST); + module.taint(ctx, value, SourceTypes.REQUEST_PATH_PARAMETER); } } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/RequestContextInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/RequestContextInstrumentation.java index 7629fab2538..f18148a7ceb 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/RequestContextInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/RequestContextInstrumentation.java @@ -13,7 +13,6 @@ import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Propagation; -import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import net.bytebuddy.asm.Advice; @@ -50,14 +49,11 @@ static void onExit( @Advice.This RequestContext requestContext, @Advice.Return HttpRequest request) { PropagationModule propagation = InstrumentationBridge.PROPAGATION; - if (propagation == null - || !(requestContext instanceof Taintable) - || !((Object) request instanceof Taintable) - || ((Taintable) (Object) request).$DD$isTainted()) { + if (propagation == null || propagation.isTainted(request)) { return; } - propagation.taintIfInputIsTainted(request, requestContext); + propagation.taintIfTainted(request, requestContext); } } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/TraitMethodMatchers.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/TraitMethodMatchers.java index 2d74073aedf..fb58538102e 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/TraitMethodMatchers.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/TraitMethodMatchers.java @@ -1,35 +1,16 @@ package datadog.trace.instrumentation.akkahttp.iast; import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; -import static net.bytebuddy.matcher.ElementMatchers.isMethod; -import static net.bytebuddy.matcher.ElementMatchers.isStatic; -import static net.bytebuddy.matcher.ElementMatchers.not; +import static datadog.trace.agent.tooling.bytebuddy.matcher.ScalaTraitMatchers.isTraitMethod; import static net.bytebuddy.matcher.ElementMatchers.returns; -import static net.bytebuddy.matcher.ElementMatchers.takesArgument; -import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.matcher.ElementMatcher; public class TraitMethodMatchers { public static ElementMatcher.Junction 
isTraitDirectiveMethod( - String traitName, String name, String... argumentTypes) { - - ElementMatcher.Junction scalaOldArgs = - isStatic() - .and(takesArguments(argumentTypes.length + 1)) - .and(takesArgument(0, named(traitName))); - ElementMatcher.Junction scalaNewArgs = - not(isStatic()).and(takesArguments(argumentTypes.length)); - - for (int i = 0; i < argumentTypes.length; i++) { - scalaOldArgs = scalaOldArgs.and(takesArgument(i + 1, named(argumentTypes[i]))); - scalaNewArgs = scalaNewArgs.and(takesArgument(i, named(argumentTypes[i]))); - } - - return isMethod() - .and(named(name)) - .and(returns(named("akka.http.scaladsl.server.Directive"))) - .and(scalaOldArgs.or(scalaNewArgs)); + String traitName, String name, Object... argumentTypes) { + return isTraitMethod(traitName, name, (Object[]) argumentTypes) + .and(returns(named("akka.http.scaladsl.server.Directive"))); } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/UriInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/UriInstrumentation.java index 907c565ae1d..49a03c9bf32 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/UriInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/UriInstrumentation.java @@ -11,13 +11,12 @@ import akka.http.scaladsl.model.Uri; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.Propagation; import datadog.trace.api.iast.Source; import datadog.trace.api.iast.SourceTypes; import datadog.trace.api.iast.propagation.PropagationModule; -import datadog.trace.api.iast.source.WebModule; -import java.util.Collections; import net.bytebuddy.asm.Advice; import scala.Tuple2; import scala.collection.Iterator; @@ -70,7 +69,7 @@ static void after(@Advice.This Uri uri, @Advice.Return scala.Option ret) if (mod == null || ret.isEmpty()) { return; } - mod.taintIfInputIsTainted(ret.get(), uri); + mod.taintIfTainted(ret.get(), uri); } } @@ -78,11 +77,10 @@ public static class TaintQueryAdvice { // bind uri to a variable of type Object so that this advice can also // be used from FromDataInstrumentaton @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after(@Advice.This /*Uri*/ Object uri, @Advice.Return Uri.Query ret) { - WebModule web = InstrumentationBridge.WEB; PropagationModule prop = InstrumentationBridge.PROPAGATION; - if (prop == null || web == null || ret.isEmpty()) { + if (prop == null || ret.isEmpty()) { return; } @@ -90,11 +88,13 @@ static void after(@Advice.This /*Uri*/ Object uri, @Advice.Return Uri.Query ret) return; } + final IastContext ctx = IastContext.Provider.get(); Iterator> iterator = ret.iterator(); while (iterator.hasNext()) { Tuple2 pair = iterator.next(); - web.onParameterNames(Collections.singleton(pair._1())); - prop.taint(SourceTypes.REQUEST_PARAMETER_VALUE, pair._1(), pair._2()); + final String name = pair._1(), value = pair._2(); + prop.taint(ctx, name, SourceTypes.REQUEST_PARAMETER_NAME, name); + prop.taint(ctx, value, SourceTypes.REQUEST_PARAMETER_VALUE, name); } } } diff --git 
a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintCookieFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintCookieFunction.java index 2d9c23c51ba..0955a0a9e49 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintCookieFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintCookieFunction.java @@ -16,10 +16,13 @@ public Tuple1 apply(Tuple1 v1) { HttpCookiePair httpCookiePair = v1._1(); PropagationModule mod = InstrumentationBridge.PROPAGATION; - if (mod == null) { + if (mod == null || httpCookiePair == null) { return v1; } - mod.taint(SourceTypes.REQUEST_COOKIE_VALUE, httpCookiePair.name(), httpCookiePair.value()); + final String name = httpCookiePair.name(); + final String value = httpCookiePair.value(); + mod.taint(name, SourceTypes.REQUEST_COOKIE_NAME, name); + mod.taint(value, SourceTypes.REQUEST_COOKIE_VALUE, name); return v1; } } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintFutureHelper.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintFutureHelper.java index 922397f52f6..c1ff9dbc8c2 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintFutureHelper.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintFutureHelper.java @@ -10,7 +10,7 @@ public static Future wrapFuture( Future f, Object input, PropagationModule mod, ExecutionContext ec) { JFunction1 mapf = t -> { - mod.taintIfInputIsTainted(t, input); + mod.taintIfTainted(t, input); return t; }; return f.map(mapf, ec); diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMapFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMapFunction.java index 40d75a9e15f..705b1921742 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMapFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMapFunction.java @@ -1,9 +1,9 @@ package datadog.trace.instrumentation.akkahttp.iast.helpers; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; import datadog.trace.api.iast.propagation.PropagationModule; -import datadog.trace.api.iast.source.WebModule; import scala.Tuple1; import scala.Tuple2; import scala.collection.Iterator; @@ -19,18 +19,17 @@ public Tuple1> apply(Tuple1> v1) { Map m = v1._1; PropagationModule prop = InstrumentationBridge.PROPAGATION; - WebModule web = InstrumentationBridge.WEB; - if (web == null || prop == null || m == null) { + if (prop == null || m == null || m.isEmpty()) { return v1; } - java.util.List keysAsCollection = ScalaToJava.keySetAsCollection(m); - web.onParameterNames(keysAsCollection); - + final IastContext ctx = IastContext.Provider.get(); Iterator> iterator = m.iterator(); while (iterator.hasNext()) { Tuple2 e = iterator.next(); 
- prop.taint(SourceTypes.REQUEST_PARAMETER_VALUE, e._1(), e._2()); + final String name = e._1(), value = e._2(); + prop.taint(ctx, name, SourceTypes.REQUEST_PARAMETER_NAME, name); + prop.taint(ctx, value, SourceTypes.REQUEST_PARAMETER_VALUE, name); } return v1; diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMultiMapFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMultiMapFunction.java index dcfe919d598..b72ac179c0b 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMultiMapFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintMultiMapFunction.java @@ -1,7 +1,9 @@ package datadog.trace.instrumentation.akkahttp.iast.helpers; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; -import datadog.trace.api.iast.source.WebModule; +import datadog.trace.api.iast.SourceTypes; +import datadog.trace.api.iast.propagation.PropagationModule; import scala.Tuple1; import scala.Tuple2; import scala.collection.Iterator; @@ -17,19 +19,21 @@ public class TaintMultiMapFunction public Tuple1>> apply(Tuple1>> v1) { Map> m = v1._1; - WebModule mod = InstrumentationBridge.WEB; - if (mod == null || m == null) { + PropagationModule mod = InstrumentationBridge.PROPAGATION; + if (mod == null || m == null || m.isEmpty()) { return v1; } - java.util.List keysAsCollection = ScalaToJava.keySetAsCollection(m); - mod.onParameterNames(keysAsCollection); - + final IastContext ctx = IastContext.Provider.get(); Iterator>> entriesIterator = m.iterator(); while (entriesIterator.hasNext()) { Tuple2> e = entriesIterator.next(); + final String name = e._1(); + mod.taint(ctx, name, SourceTypes.REQUEST_PARAMETER_NAME, name); List values = e._2(); - mod.onParameterValues(e._1(), ScalaToJava.listAsList(values)); + for (final String value : ScalaToJava.listAsList(values)) { + mod.taint(ctx, value, SourceTypes.REQUEST_PARAMETER_VALUE, name); + } } return v1; diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintOptionalCookieFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintOptionalCookieFunction.java index 1f6d8bb62a2..620bea2ebf8 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintOptionalCookieFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintOptionalCookieFunction.java @@ -17,13 +17,14 @@ public Tuple1> apply(Tuple1> v1) { Option httpCookiePair = v1._1(); PropagationModule mod = InstrumentationBridge.PROPAGATION; - if (mod == null || httpCookiePair.isEmpty()) { + if (mod == null || httpCookiePair == null || httpCookiePair.isEmpty()) { return v1; } - mod.taint( - SourceTypes.REQUEST_COOKIE_VALUE, - httpCookiePair.get().name(), - httpCookiePair.get().value()); + final HttpCookiePair cookie = httpCookiePair.get(); + final String name = cookie.name(); + final String value = cookie.value(); + mod.taint(name, SourceTypes.REQUEST_COOKIE_NAME, name); + mod.taint(value, SourceTypes.REQUEST_COOKIE_VALUE, name); return v1; } } diff --git 
a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestContextFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestContextFunction.java index 8454bf9445f..bc86e937643 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestContextFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestContextFunction.java @@ -3,7 +3,6 @@ import akka.http.scaladsl.server.RequestContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; -import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; import scala.Tuple1; import scala.compat.java8.JFunction1; @@ -17,10 +16,10 @@ public Tuple1 apply(Tuple1 v1) { RequestContext reqCtx = v1._1(); PropagationModule mod = InstrumentationBridge.PROPAGATION; - if (mod == null || !(reqCtx instanceof Taintable)) { + if (mod == null || reqCtx == null) { return v1; } - mod.taintObject(SourceTypes.REQUEST_BODY, reqCtx); + mod.taint(reqCtx, SourceTypes.REQUEST_BODY); return v1; } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestFunction.java index 6b948f2bd33..7894326ab89 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintRequestFunction.java @@ -3,7 +3,6 @@ import akka.http.scaladsl.model.HttpRequest; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; -import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import scala.Tuple1; @@ -18,10 +17,10 @@ public Tuple1 apply(Tuple1 v1) { HttpRequest httpRequest = v1._1(); PropagationModule mod = InstrumentationBridge.PROPAGATION; - if (mod == null || !((Object) httpRequest instanceof Taintable)) { + if (mod == null || httpRequest == null) { return v1; } - mod.taintObject(SourceTypes.REQUEST_BODY, httpRequest); + mod.taint(httpRequest, SourceTypes.REQUEST_BODY); return v1; } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSeqFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSeqFunction.java index 0d41fc32e56..fdec8e35b4e 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSeqFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSeqFunction.java @@ -1,9 +1,9 @@ package datadog.trace.instrumentation.akkahttp.iast.helpers; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; import datadog.trace.api.iast.propagation.PropagationModule; -import 
datadog.trace.api.iast.source.WebModule; import java.util.Collections; import java.util.IdentityHashMap; import java.util.Set; @@ -22,22 +22,22 @@ public class TaintSeqFunction public Tuple1>> apply(Tuple1>> v1) { Seq> seq = v1._1; - WebModule web = InstrumentationBridge.WEB; PropagationModule prop = InstrumentationBridge.PROPAGATION; - if (web == null || prop == null || seq == null) { + if (prop == null || seq == null || seq.isEmpty()) { return v1; } + final IastContext ctx = IastContext.Provider.get(); Iterator> iterator = seq.iterator(); - Set seenKeys = Collections.newSetFromMap(new IdentityHashMap()); + Set seenKeys = Collections.newSetFromMap(new IdentityHashMap<>()); while (iterator.hasNext()) { Tuple2 t = iterator.next(); String name = t._1(); String value = t._2(); if (seenKeys.add(name)) { - web.onParameterNames(Collections.singleton(name)); + prop.taint(ctx, name, SourceTypes.REQUEST_PARAMETER_NAME, name); } - prop.taint(SourceTypes.REQUEST_PARAMETER_VALUE, name, value); + prop.taint(ctx, value, SourceTypes.REQUEST_PARAMETER_VALUE, name); } return v1; diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSingleParameterFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSingleParameterFunction.java index e3130be8062..f6c1073fff8 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSingleParameterFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintSingleParameterFunction.java @@ -1,5 +1,6 @@ package datadog.trace.instrumentation.akkahttp.iast.helpers; +import datadog.trace.api.iast.IastContext; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; import datadog.trace.api.iast.propagation.PropagationModule; @@ -40,15 +41,16 @@ public Tuple1 apply(Tuple1 v1) { } if (value instanceof Iterable) { - Iterator iterator = ((Iterable) value).iterator(); + final IastContext ctx = IastContext.Provider.get(); + Iterator iterator = ((Iterable) value).iterator(); while (iterator.hasNext()) { Object o = iterator.next(); if (o instanceof String) { - mod.taint(SourceTypes.REQUEST_PARAMETER_VALUE, paramName, (String) o); + mod.taint(ctx, (String) o, SourceTypes.REQUEST_PARAMETER_VALUE, paramName); } } } else if (value instanceof String) { - mod.taint(SourceTypes.REQUEST_PARAMETER_VALUE, paramName, (String) value); + mod.taint((String) value, SourceTypes.REQUEST_PARAMETER_VALUE, paramName); } return v1; diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUnmarshaller.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUnmarshaller.java index 88d1580643a..d109cc92d9d 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUnmarshaller.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUnmarshaller.java @@ -28,7 +28,7 @@ public TaintUnmarshaller(PropagationModule propagationModule, Unmarshaller @Override public Future apply(A value, ExecutionContext ec, Materializer materializer) { - propagationModule.taintObject(SourceTypes.REQUEST_BODY, value); + 
propagationModule.taint(value, SourceTypes.REQUEST_BODY); return delegate.apply(value, ec, materializer); } diff --git a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUriFunction.java b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUriFunction.java index eae576a422a..5feb8315119 100644 --- a/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUriFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.0/src/main/java/datadog/trace/instrumentation/akkahttp/iast/helpers/TaintUriFunction.java @@ -3,7 +3,6 @@ import akka.http.scaladsl.model.Uri; import datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; -import datadog.trace.api.iast.Taintable; import datadog.trace.api.iast.propagation.PropagationModule; import scala.Tuple1; import scala.compat.java8.JFunction1; @@ -16,10 +15,10 @@ public Tuple1 apply(Tuple1 v1) { Uri uri = v1._1(); PropagationModule mod = InstrumentationBridge.PROPAGATION; - if (mod == null || !(uri instanceof Taintable)) { + if (mod == null) { return v1; } - mod.taintObject(SourceTypes.REQUEST_QUERY, uri); + mod.taint(uri, SourceTypes.REQUEST_QUERY); return v1; } diff --git a/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/ParameterDirectivesImplInstrumentation.java b/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/ParameterDirectivesImplInstrumentation.java index 1be72c41bcb..3ec1eb1ee3c 100644 --- a/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/ParameterDirectivesImplInstrumentation.java +++ b/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/ParameterDirectivesImplInstrumentation.java @@ -69,7 +69,7 @@ public void adviceTransformations(AdviceTransformation transformation) { static class FilterAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after( @Advice.Argument(0) String paramName, @Advice.Return(readOnly = false) Directive /*>*/ retval) { @@ -84,7 +84,7 @@ static void after( static class RepeatedFilterAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - @Source(SourceTypes.REQUEST_PARAMETER_VALUE_STRING) + @Source(SourceTypes.REQUEST_PARAMETER_VALUE) static void after( @Advice.Argument(0) String paramName, @Advice.Return(readOnly = false) Directive /*>>*/ retval) { diff --git a/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/helpers/TaintParametersFunction.java b/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/helpers/TaintParametersFunction.java index 213ee7638e1..87008ac5ab4 100644 --- a/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/helpers/TaintParametersFunction.java +++ b/dd-java-agent/instrumentation/akka-http-10.2-iast/src/main/java/datadog/trace/instrumentation/akkahttp102/iast/helpers/TaintParametersFunction.java @@ -1,5 +1,6 @@ package datadog.trace.instrumentation.akkahttp102.iast.helpers; +import datadog.trace.api.iast.IastContext; import 
datadog.trace.api.iast.InstrumentationBridge; import datadog.trace.api.iast.SourceTypes; import datadog.trace.api.iast.propagation.PropagationModule; @@ -33,15 +34,16 @@ public Tuple1 apply(Tuple1 v1) { } if (value instanceof Iterable) { - Iterator iterator = ((Iterable) value).iterator(); + final IastContext ctx = IastContext.Provider.get(); + Iterator iterator = ((Iterable) value).iterator(); while (iterator.hasNext()) { Object o = iterator.next(); if (o instanceof String) { - mod.taint(SourceTypes.REQUEST_PARAMETER_VALUE, paramName, (String) o); + mod.taint(ctx, (String) o, SourceTypes.REQUEST_PARAMETER_VALUE, paramName); } } } else if (value instanceof String) { - mod.taint(SourceTypes.REQUEST_PARAMETER_VALUE, paramName, (String) value); + mod.taint((String) value, SourceTypes.REQUEST_PARAMETER_VALUE, paramName); } return v1; diff --git a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientDecorator.java b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientDecorator.java index c16307d1a0e..5f9e44fd6df 100644 --- a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientDecorator.java +++ b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientDecorator.java @@ -1,20 +1,18 @@ package datadog.trace.instrumentation.apachehttpasyncclient; -import datadog.trace.bootstrap.instrumentation.api.URIUtils; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.HttpClientDecorator; import java.net.URI; import java.net.URISyntaxException; import org.apache.http.Header; -import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; -import org.apache.http.RequestLine; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpCoreContext; -public class ApacheHttpAsyncClientDecorator extends HttpClientDecorator { +public class ApacheHttpAsyncClientDecorator + extends HttpClientDecorator { public static final CharSequence APACHE_HTTPASYNCCLIENT = UTF8BytesString.create("apache-httpasyncclient"); @@ -34,28 +32,13 @@ protected CharSequence component() { } @Override - protected String method(final HttpRequest request) { - if (request instanceof HttpUriRequest) { - return ((HttpUriRequest) request).getMethod(); - } else { - final RequestLine requestLine = request.getRequestLine(); - return requestLine == null ? null : requestLine.getMethod(); - } + protected String method(final HttpUriRequest request) { + return request.getMethod(); } @Override - protected URI url(final HttpRequest request) throws URISyntaxException { - /* - * Note: this is essentially an optimization: HttpUriRequest allows quicker access to required information. - * The downside is that we need to load HttpUriRequest which essentially means we depend on httpasyncclient - * library depending on httpclient library. Currently this seems to be the case. - */ - if (request instanceof HttpUriRequest) { - return ((HttpUriRequest) request).getURI(); - } else { - final RequestLine requestLine = request.getRequestLine(); - return requestLine == null ? 
null : URIUtils.safeParse(requestLine.getUri()); - } + protected URI url(final HttpUriRequest request) throws URISyntaxException { + return request.getURI(); } @Override @@ -71,7 +54,7 @@ protected int status(final HttpContext context) { } @Override - protected String getRequestHeader(HttpRequest request, String headerName) { + protected String getRequestHeader(HttpUriRequest request, String headerName) { Header header = request.getFirstHeader(headerName); if (header != null) { return header.getValue(); diff --git a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientInstrumentation.java b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientInstrumentation.java index 228e97a3524..815737bfb9f 100644 --- a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientInstrumentation.java +++ b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/ApacheHttpAsyncClientInstrumentation.java @@ -63,7 +63,8 @@ public String[] helperClassNames() { packageName + ".HttpHeadersInjectAdapter", packageName + ".DelegatingRequestProducer", packageName + ".TraceContinuedFutureCallback", - packageName + ".ApacheHttpAsyncClientDecorator" + packageName + ".ApacheHttpAsyncClientDecorator", + packageName + ".HostAndRequestAsHttpUriRequest" }; } diff --git a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/DelegatingRequestProducer.java b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/DelegatingRequestProducer.java index 3c4a4b8f18f..c39f1d54fa3 100644 --- a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/DelegatingRequestProducer.java +++ b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/DelegatingRequestProducer.java @@ -32,7 +32,7 @@ public HttpHost getTarget() { @Override public HttpRequest generateRequest() throws IOException, HttpException { final HttpRequest request = delegate.generateRequest(); - DECORATE.onRequest(span, request); + DECORATE.onRequest(span, new HostAndRequestAsHttpUriRequest(delegate.getTarget(), request)); propagate().inject(span, request, SETTER); propagate() diff --git a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/HostAndRequestAsHttpUriRequest.java b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/HostAndRequestAsHttpUriRequest.java new file mode 100644 index 00000000000..7bf1f316b60 --- /dev/null +++ b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/main/java/datadog/trace/instrumentation/apachehttpasyncclient/HostAndRequestAsHttpUriRequest.java @@ -0,0 +1,78 @@ +package datadog.trace.instrumentation.apachehttpasyncclient; + +import datadog.trace.api.Config; +import datadog.trace.bootstrap.instrumentation.api.URIUtils; +import org.apache.http.Header; +import org.apache.http.HttpHost; +import org.apache.http.HttpRequest; +import org.apache.http.ProtocolVersion; +import org.apache.http.RequestLine; +import 
org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.message.AbstractHttpMessage; + +/** Wraps HttpHost and HttpRequest into a HttpUriRequest for decorators and injectors */ +public class HostAndRequestAsHttpUriRequest extends AbstractHttpMessage implements HttpUriRequest { + // other versions are not affected by this url parsing bug + private static final boolean legacyTracingEnabled = + Config.get().isLegacyTracingEnabled(false, "httpasyncclient4"); + private final String method; + private final RequestLine requestLine; + private final ProtocolVersion protocolVersion; + private final java.net.URI uri; + private final HttpRequest actualRequest; + + public HostAndRequestAsHttpUriRequest(final HttpHost httpHost, final HttpRequest httpRequest) { + method = httpRequest.getRequestLine().getMethod(); + requestLine = httpRequest.getRequestLine(); + protocolVersion = requestLine.getProtocolVersion(); + uri = + legacyTracingEnabled + ? URIUtils.safeParse(requestLine.getUri()) + : URIUtils.safeConcat(httpHost.toURI(), requestLine.getUri()); + actualRequest = httpRequest; + } + + @Override + public void abort() throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } + + @Override + public boolean isAborted() { + return false; + } + + @Override + public void setHeader(final String name, final String value) { + actualRequest.setHeader(name, value); + } + + @Override + public String getMethod() { + return method; + } + + @Override + public RequestLine getRequestLine() { + return requestLine; + } + + @Override + public ProtocolVersion getProtocolVersion() { + return protocolVersion; + } + + @Override + public java.net.URI getURI() { + return uri; + } + + @Override + public Header getFirstHeader(String name) { + return actualRequest.getFirstHeader(name); + } + + public HttpRequest getActualRequest() { + return actualRequest; + } +} diff --git a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/test/groovy/ApacheHttpAsyncClientTest.groovy b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/test/groovy/ApacheHttpAsyncClientTest.groovy index 2a778d568cf..bc57def64cf 100644 --- a/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/test/groovy/ApacheHttpAsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/apache-httpasyncclient-4/src/test/groovy/ApacheHttpAsyncClientTest.groovy @@ -1,8 +1,11 @@ +import datadog.trace.agent.test.asserts.TraceAssert import datadog.trace.agent.test.base.HttpClientTest import datadog.trace.agent.test.naming.TestingGenericHttpNamingConventions import datadog.trace.instrumentation.apachehttpasyncclient.ApacheHttpAsyncClientDecorator +import org.apache.http.HttpHost import org.apache.http.HttpResponse import org.apache.http.client.config.RequestConfig +import org.apache.http.client.utils.URIBuilder import org.apache.http.concurrent.FutureCallback import org.apache.http.impl.nio.client.HttpAsyncClients import org.apache.http.message.BasicHeader @@ -29,9 +32,17 @@ abstract class ApacheHttpAsyncClientTest extends HttpClientTest { client.start() } + protected HttpUriRequest createRequest(String method, URI uri) { + new HttpUriRequest(method, uri) + } + + protected HttpResponse executeRequest(HttpUriRequest request, URI uri, FutureCallback handler) { + client.execute(request, handler).get() + } + @Override int doRequest(String method, URI uri, Map headers, String body, Closure callback) { - def request = new HttpUriRequest(method, uri) + def request = createRequest(method, uri) headers.entrySet().each { 
request.addHeader(new BasicHeader(it.key, it.value)) } @@ -58,7 +69,7 @@ abstract class ApacheHttpAsyncClientTest extends HttpClientTest { } try { - def response = client.execute(request, handler).get() + def response = executeRequest(request, uri, handler) response.entity?.content?.close() // Make sure the connection is closed. latch.await() response.statusLine.statusCode @@ -83,8 +94,49 @@ abstract class ApacheHttpAsyncClientTest extends HttpClientTest { } } -class ApacheHttpAsyncClientV0ForkedTest extends ApacheHttpAsyncClientTest implements TestingGenericHttpNamingConventions.ClientV0 { +class ApacheHttpAsyncClientV0Test extends ApacheHttpAsyncClientTest implements TestingGenericHttpNamingConventions.ClientV0 { } class ApacheHttpAsyncClientV1ForkedTest extends ApacheHttpAsyncClientTest implements TestingGenericHttpNamingConventions.ClientV1 { } + +class ApacheHttpAsyncClientHostRequestTest extends ApacheHttpAsyncClientV0Test { + + def relativizeUri(URI uri) { + new URIBuilder(uri).setHost(null).setPort(-1).setScheme(null).build() + } + + @Override + protected HttpUriRequest createRequest(String method, URI uri) { + new HttpUriRequest(method, relativizeUri(uri)) + } + + @Override + protected HttpResponse executeRequest(HttpUriRequest request, URI uri, FutureCallback handler) { + client.execute(new HttpHost(uri.getHost(), uri.getPort()), request, handler).get() + } +} + +class ApacheHttpAsyncClientHostRequestLegacyForkedTest extends ApacheHttpAsyncClientHostRequestTest { + @Override + void setup() { + injectSysConfig("httpasyncclient4.legacy.tracing.enabled", "true") + } + + @Override + void clientSpan( + TraceAssert trace, + Object parentSpan, + String method, + boolean renameService, + boolean tagQueryString, + URI uri, + Integer status, + boolean error, + Throwable exception, + boolean ignorePeer, + Map extraTags) { + super.clientSpan(trace, parentSpan, method, false, // split-by-host is also buggy since host info is missing + tagQueryString, relativizeUri(uri), status, error, exception, true, extraTags) + } +} diff --git a/dd-java-agent/instrumentation/apache-httpclient-4/src/main/java/datadog/trace/instrumentation/apachehttpclient/HostAndRequestAsHttpUriRequest.java b/dd-java-agent/instrumentation/apache-httpclient-4/src/main/java/datadog/trace/instrumentation/apachehttpclient/HostAndRequestAsHttpUriRequest.java index 87ec223000b..4d7a7ffc929 100644 --- a/dd-java-agent/instrumentation/apache-httpclient-4/src/main/java/datadog/trace/instrumentation/apachehttpclient/HostAndRequestAsHttpUriRequest.java +++ b/dd-java-agent/instrumentation/apache-httpclient-4/src/main/java/datadog/trace/instrumentation/apachehttpclient/HostAndRequestAsHttpUriRequest.java @@ -1,7 +1,6 @@ package datadog.trace.instrumentation.apachehttpclient; -import java.net.URI; -import java.net.URISyntaxException; +import datadog.trace.bootstrap.instrumentation.api.URIUtils; import org.apache.http.Header; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; @@ -25,14 +24,7 @@ public HostAndRequestAsHttpUriRequest(final HttpHost httpHost, final HttpRequest method = httpRequest.getRequestLine().getMethod(); requestLine = httpRequest.getRequestLine(); protocolVersion = requestLine.getProtocolVersion(); - - URI calculatedURI; - try { - calculatedURI = new URI(httpHost.toURI() + httpRequest.getRequestLine().getUri()); - } catch (final URISyntaxException e) { - calculatedURI = null; - } - uri = calculatedURI; + uri = URIUtils.safeConcat(httpHost.toURI(), requestLine.getUri()); actualRequest = httpRequest; }
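Note on the two HostAndRequestAsHttpUriRequest changes above: when a request is executed against an explicit HttpHost with a relative request line, the request line alone cannot yield an absolute URL, so both the sync (apache-httpclient-4) and async (apache-httpasyncclient-4) instrumentations wrap the host and request into a single HttpUriRequest and build the URL with URIUtils.safeConcat; the async variant additionally keeps the old request-line-only parsing behind the httpasyncclient4 legacy tracing flag exercised by ApacheHttpAsyncClientHostRequestLegacyForkedTest. The following is a minimal illustrative sketch, not part of the change set; the host, port and path are hypothetical, and it assumes HostAndRequestAsHttpUriRequest is visible from the same package as in the diff.

// Illustrative sketch only: how the wrapper lets the decorator read a full URL
// from a host plus a relative request, mirroring DelegatingRequestProducer above.
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.message.BasicHttpRequest;

class WrapperUsageSketch {
  HttpUriRequest wrap() {
    HttpHost target = new HttpHost("example.com", 8080);            // hypothetical target host
    HttpRequest relative = new BasicHttpRequest("GET", "/status");   // request line carries no host
    // The wrapper computes getURI() via URIUtils.safeConcat(target.toURI(), "/status"),
    // so the client span is tagged with an absolute URL such as
    // http://example.com:8080/status instead of a bare path.
    return new HostAndRequestAsHttpUriRequest(target, relative);
  }
}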
diff --git a/dd-java-agent/instrumentation/armeria-grpc/build.gradle b/dd-java-agent/instrumentation/armeria-grpc/build.gradle new file mode 100644 index 00000000000..f6a9d253ee3 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/build.gradle @@ -0,0 +1,65 @@ +plugins { + id 'com.google.protobuf' version '0.8.18' +} + +muzzle { + pass { + group = "com.linecorp.armeria" + module = "armeria-grpc" + versions = "[0.84.0,)" + assertInverse true + skipVersions += "1.3.0" // com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer is missing in this one version + } +} + +apply from: "$rootDir/gradle/java.gradle" +apply plugin: 'idea' + +// First version with Mac M1 support +def protocVersion = '3.17.3' +def grpcVersion = '1.42.2' +protobuf { + protoc { + // Download compiler rather than using locally installed version: + // First version with Mac M1 support + artifact = "com.google.protobuf:protoc:${protocVersion}" + } + plugins { + // First version with aarch support + grpc { + artifact = "io.grpc:protoc-gen-grpc-java:${grpcVersion}" + } + } + generateProtoTasks { + all()*.plugins { + grpc {} + } + } +} + +addTestSuiteForDir('latestDepTest', 'test') +addTestSuiteExtendingForDir('latestDepForkedTest', 'latestDepTest', 'test') + +apply from: "$rootDir/gradle/configure_tests.gradle" + +latestDepTest { + finalizedBy 'latestDepForkedTest' +} + +dependencies { + compileOnly group: 'com.linecorp.armeria', name: 'armeria-grpc', version: '0.84.0' + compileOnly group: 'com.linecorp.armeria', name: 'armeria-grpc-protocol', version: '0.84.0' + + testImplementation group: 'com.linecorp.armeria', name: 'armeria-grpc', version: '1.0.0' + testImplementation group: 'com.linecorp.armeria', name: 'armeria-junit4', version: '1.0.0' + testImplementation group: 'com.google.protobuf', name: 'protobuf-java', version: protocVersion + testImplementation group: 'io.grpc', name: 'grpc-stub', version: grpcVersion + testImplementation group: 'javax.annotation', name: 'javax.annotation-api', version: '1.3.2' + testImplementation project(':dd-java-agent:instrumentation:grpc-1.5') + testImplementation project(':dd-java-agent:instrumentation:netty-3.8') + testImplementation project(':dd-java-agent:instrumentation:netty-4.0') + testImplementation project(':dd-java-agent:instrumentation:netty-4.1') + + latestDepTestImplementation sourceSets.test.output // include the protobuf generated classes + latestDepTestImplementation group: 'com.linecorp.armeria', name: 'armeria-grpc', version: '1.+' +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/ArmeriaMessageDeframerInstrumentation.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/ArmeriaMessageDeframerInstrumentation.java new file mode 100644 index 00000000000..d9253f2244d --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/ArmeriaMessageDeframerInstrumentation.java @@ -0,0 +1,119 @@ +package datadog.trace.instrumentation.armeria.grpc.client; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers.extendsClass; +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; +import static net.bytebuddy.matcher.ElementMatchers.isConstructor; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static 
net.bytebuddy.matcher.ElementMatchers.takesArgument; + +import com.google.auto.service.AutoService; +import com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.bootstrap.InstrumentationContext; +import datadog.trace.bootstrap.instrumentation.api.AgentScope; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import io.grpc.ClientCall; +import java.util.HashMap; +import java.util.Map; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; + +@AutoService(Instrumenter.class) +public class ArmeriaMessageDeframerInstrumentation extends Instrumenter.Tracing + implements Instrumenter.ForTypeHierarchy { + public ArmeriaMessageDeframerInstrumentation() { + super("armeria-grpc-client", "armeria-grpc", "armeria", "grpc-client", "grpc"); + } + + @Override + public String hierarchyMarkerType() { + return "com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer"; + } + + @Override + public ElementMatcher hierarchyMatcher() { + return named(hierarchyMarkerType()).or(extendsClass(named(hierarchyMarkerType()))); + } + + @Override + public Map contextStore() { + Map contextStore = new HashMap<>(4); + contextStore.put("io.grpc.ClientCall", AgentSpan.class.getName()); + contextStore.put(hierarchyMarkerType(), "io.grpc.ClientCall"); + return contextStore; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isConstructor() + .and( + takesArgument( + 0, + named( + "com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer$Listener"))), + getClass().getName() + "$CaptureClientCallArg0"); + transformation.applyAdvice( + isConstructor() + .and( + takesArgument( + 2, named("com.linecorp.armeria.internal.common.grpc.TransportStatusListener"))), + getClass().getName() + "$CaptureClientCallArg2"); + transformation.applyAdvice( + isMethod().and(named("process").or(named("deframe"))), + getClass().getName() + "$ActivateSpan"); + } + + public static final class CaptureClientCallArg0 { + @SuppressWarnings("rawtypes") + @Advice.OnMethodExit + public static void capture( + @Advice.This ArmeriaMessageDeframer messageDeframer, + @Advice.Argument(0) Object clientCall) { + if (clientCall instanceof ClientCall) { + InstrumentationContext.get(ArmeriaMessageDeframer.class, ClientCall.class) + .put(messageDeframer, (ClientCall) clientCall); + } + } + } + + public static final class CaptureClientCallArg2 { + @SuppressWarnings("rawtypes") + @Advice.OnMethodExit + public static void capture( + @Advice.This ArmeriaMessageDeframer messageDeframer, + @Advice.Argument(2) Object clientCall) { + if (clientCall instanceof ClientCall) { + InstrumentationContext.get(ArmeriaMessageDeframer.class, ClientCall.class) + .put(messageDeframer, (ClientCall) clientCall); + } + } + } + + public static final class ActivateSpan { + @SuppressWarnings("rawtypes") + @Advice.OnMethodEnter + public static AgentScope before(@Advice.This ArmeriaMessageDeframer messageDeframer) { + ClientCall clientCall = + InstrumentationContext.get(ArmeriaMessageDeframer.class, ClientCall.class) + .get(messageDeframer); + if (clientCall != null) { + AgentSpan span = + InstrumentationContext.get(ClientCall.class, AgentSpan.class).get(clientCall); + if (null != span) { + return activateSpan(span); + } + } + return null; + } + + @Advice.OnMethodExit(onThrowable = Throwable.class) + public static void 
after(@Advice.Enter AgentScope scope) { + if (null != scope) { + scope.close(); + } + } + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/ClientCallImplInstrumentation.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/ClientCallImplInstrumentation.java new file mode 100644 index 00000000000..1c0eaa475b9 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/ClientCallImplInstrumentation.java @@ -0,0 +1,237 @@ +package datadog.trace.instrumentation.armeria.grpc.client; + +import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.propagate; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; +import static datadog.trace.instrumentation.armeria.grpc.client.GrpcClientDecorator.CLIENT_PATHWAY_EDGE_TAGS; +import static datadog.trace.instrumentation.armeria.grpc.client.GrpcClientDecorator.DECORATE; +import static datadog.trace.instrumentation.armeria.grpc.client.GrpcClientDecorator.GRPC_MESSAGE; +import static datadog.trace.instrumentation.armeria.grpc.client.GrpcClientDecorator.OPERATION_NAME; +import static datadog.trace.instrumentation.armeria.grpc.client.GrpcInjectAdapter.SETTER; +import static net.bytebuddy.matcher.ElementMatchers.isConstructor; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import datadog.trace.bootstrap.InstrumentationContext; +import datadog.trace.bootstrap.instrumentation.api.AgentScope; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import io.grpc.ClientCall; +import io.grpc.Metadata; +import io.grpc.MethodDescriptor; +import io.grpc.Status; +import io.grpc.StatusException; +import java.util.Collections; +import java.util.Map; +import net.bytebuddy.asm.Advice; + +@AutoService(Instrumenter.class) +public final class ClientCallImplInstrumentation extends Instrumenter.Tracing + implements Instrumenter.ForSingleType { + + public ClientCallImplInstrumentation() { + super("armeria-grpc-client", "armeria-grpc", "armeria", "grpc-client", "grpc"); + } + + @Override + public Map contextStore() { + return Collections.singletonMap("io.grpc.ClientCall", AgentSpan.class.getName()); + } + + @Override + public String instrumentedType() { + return "com.linecorp.armeria.internal.client.grpc.ArmeriaClientCall"; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return new Reference[] { + new Reference( + new String[0], + 1, + "com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer", + null, + new String[0], + new Reference.Field[0], + new Reference.Method[0]) + }; + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".GrpcClientDecorator", + packageName + ".GrpcClientDecorator$1", + packageName + ".GrpcInjectAdapter" + }; + } + + @Override + public void adviceTransformations(AdviceTransformation 
transformation) { + transformation.applyAdvice( + isConstructor().and(takesArgument(4, named("io.grpc.MethodDescriptor"))), + getClass().getName() + "$CaptureCall"); + transformation.applyAdvice(named("start").and(isMethod()), getClass().getName() + "$Start"); + transformation.applyAdvice(named("cancel").and(isMethod()), getClass().getName() + "$Cancel"); + transformation.applyAdvice( + named("request") + .and(isMethod()) + .and(takesArguments(int.class)) + .or(isMethod().and(named("halfClose").and(takesArguments(0)))), + getClass().getName() + "$ActivateSpan"); + transformation.applyAdvice( + named("sendMessage").and(isMethod()), getClass().getName() + "$SendMessage"); + transformation.applyAdvice( + named("close").and(isMethod().and(takesArguments(2))), + getClass().getName() + "$CloseObserver"); + transformation.applyAdvice( + named("onNext").or(named("messageRead")), getClass().getName() + "$ReceiveMessages"); + } + + public static final class CaptureCall { + @Advice.OnMethodExit + public static void capture( + @Advice.This ClientCall call, @Advice.Argument(4) MethodDescriptor method) { + AgentSpan span = DECORATE.startCall(method); + if (null != span) { + InstrumentationContext.get(ClientCall.class, AgentSpan.class).put(call, span); + } + } + } + + public static final class Start { + @Advice.OnMethodEnter + public static AgentScope before( + @Advice.This ClientCall call, + @Advice.Argument(0) ClientCall.Listener responseListener, + @Advice.Argument(1) Metadata headers, + @Advice.Local("$$ddSpan") AgentSpan span) { + if (null != responseListener && null != headers) { + span = InstrumentationContext.get(ClientCall.class, AgentSpan.class).get(call); + if (null != span) { + propagate().inject(span, headers, SETTER); + propagate().injectPathwayContext(span, headers, SETTER, CLIENT_PATHWAY_EDGE_TAGS); + return activateSpan(span); + } + } + return null; + } + + @Advice.OnMethodExit(onThrowable = Throwable.class) + public static void after( + @Advice.Enter AgentScope scope, + @Advice.Thrown Throwable error, + @Advice.Local("$$ddSpan") AgentSpan span) + throws Throwable { + if (null != scope) { + scope.close(); + } + if (null != error && null != span) { + DECORATE.onError(span, error); + DECORATE.beforeFinish(span); + span.finish(); + throw error; + } + } + } + + public static final class ActivateSpan { + @Advice.OnMethodEnter + public static AgentScope before(@Advice.This ClientCall call) { + AgentSpan span = InstrumentationContext.get(ClientCall.class, AgentSpan.class).get(call); + if (null != span) { + return activateSpan(span); + } + return null; + } + + @Advice.OnMethodExit(onThrowable = Throwable.class) + public static void after(@Advice.Enter AgentScope scope) { + if (null != scope) { + scope.close(); + } + } + } + + public static final class SendMessage { + @Advice.OnMethodEnter + public static AgentScope before(@Advice.This ClientCall call) { + // could create a message span here for the request + AgentSpan span = InstrumentationContext.get(ClientCall.class, AgentSpan.class).get(call); + if (span != null) { + return activateSpan(span); + } + return null; + } + + @Advice.OnMethodExit(onThrowable = Throwable.class) + public static void after(@Advice.Enter AgentScope scope) { + if (null != scope) { + scope.close(); + } + } + } + + public static final class Cancel { + @Advice.OnMethodEnter + public static void before( + @Advice.This ClientCall call, @Advice.Argument(1) Throwable cause) { + AgentSpan span = InstrumentationContext.get(ClientCall.class, AgentSpan.class).remove(call); + if 
(null != span) { + if (cause instanceof StatusException) { + DECORATE.onClose(span, ((StatusException) cause).getStatus()); + } + span.finish(); + } + } + } + + public static final class CloseObserver { + @Advice.OnMethodEnter + public static AgentScope before(@Advice.This ClientCall call) { + // could create a message span here for the request + AgentSpan span = InstrumentationContext.get(ClientCall.class, AgentSpan.class).remove(call); + if (span != null) { + return activateSpan(span); + } + return null; + } + + @Advice.OnMethodExit(onThrowable = Throwable.class) + public static void closeObserver( + @Advice.Enter AgentScope scope, @Advice.Argument(0) Status status) { + if (null != scope) { + DECORATE.onClose(scope.span(), status); + scope.span().finish(); + scope.close(); + } + } + } + + public static final class ReceiveMessages { + @Advice.OnMethodEnter + public static AgentScope before() { + AgentSpan clientSpan = activeSpan(); + if (clientSpan != null && OPERATION_NAME.equals(clientSpan.getOperationName())) { + AgentSpan messageSpan = + startSpan(GRPC_MESSAGE).setTag("message.type", clientSpan.getTag("response.type")); + DECORATE.afterStart(messageSpan); + return activateSpan(messageSpan); + } + return null; + } + + @Advice.OnMethodExit(onThrowable = Throwable.class) + public static void after(@Advice.Enter AgentScope scope) { + if (null != scope) { + scope.span().finish(); + scope.close(); + } + } + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java new file mode 100644 index 00000000000..7a6f76d200a --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java @@ -0,0 +1,124 @@ +package datadog.trace.instrumentation.armeria.grpc.client; + +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; +import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; + +import datadog.trace.api.Config; +import datadog.trace.api.GenericClassValue; +import datadog.trace.api.cache.DDCache; +import datadog.trace.api.cache.DDCaches; +import datadog.trace.api.naming.SpanNaming; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; +import datadog.trace.bootstrap.instrumentation.decorator.ClientDecorator; +import io.grpc.MethodDescriptor; +import io.grpc.Status; +import java.util.BitSet; +import java.util.LinkedHashMap; +import java.util.Set; +import java.util.function.Function; + +public class GrpcClientDecorator extends ClientDecorator { + public static final CharSequence OPERATION_NAME = + UTF8BytesString.create( + SpanNaming.instance().namingSchema().client().operationForProtocol("grpc")); + public static final CharSequence COMPONENT_NAME = UTF8BytesString.create("armeria-grpc-client"); + public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); + + private static LinkedHashMap createClientPathwaySortedTags() { + LinkedHashMap result = new LinkedHashMap<>(); + 
result.put(DIRECTION_TAG, DIRECTION_OUT); + result.put(TYPE_TAG, "grpc"); + return result; + } + + public static final LinkedHashMap CLIENT_PATHWAY_EDGE_TAGS = + createClientPathwaySortedTags(); + + public static final GrpcClientDecorator DECORATE = new GrpcClientDecorator(); + + private static final Set IGNORED_METHODS = Config.get().getGrpcIgnoredOutboundMethods(); + private static final BitSet CLIENT_ERROR_STATUSES = Config.get().getGrpcClientErrorStatuses(); + + private static final ClassValue MESSAGE_TYPES = + GenericClassValue.of( + // Uses inner class for predictable name for Instrumenter.Default.helperClassNames() + new Function, UTF8BytesString>() { + @Override + public UTF8BytesString apply(Class input) { + return UTF8BytesString.create(input.getName()); + } + }); + + private static final DDCache RPC_SERVICE_CACHE = DDCaches.newFixedSizeCache(64); + + public UTF8BytesString requestMessageType(MethodDescriptor method) { + return messageType(method.getRequestMarshaller()); + } + + public UTF8BytesString responseMessageType(MethodDescriptor method) { + return messageType(method.getResponseMarshaller()); + } + + private UTF8BytesString messageType(MethodDescriptor.Marshaller marshaller) { + return marshaller instanceof MethodDescriptor.ReflectableMarshaller + ? MESSAGE_TYPES.get( + ((MethodDescriptor.ReflectableMarshaller) marshaller).getMessageClass()) + : null; + } + + @Override + protected String[] instrumentationNames() { + return new String[] {"armeria-grpc-client", "armeria-grpc", "armeria", "grpc-client", "grpc"}; + } + + @Override + protected CharSequence component() { + return COMPONENT_NAME; + } + + @Override + protected CharSequence spanType() { + return InternalSpanTypes.RPC; + } + + @Override + protected String service() { + return null; + } + + public AgentSpan startCall(MethodDescriptor method) { + if (IGNORED_METHODS.contains(method.getFullMethodName())) { + return null; + } + AgentSpan span = + startSpan(OPERATION_NAME) + .setTag("request.type", requestMessageType(method)) + .setTag("response.type", responseMessageType(method)) + // method.getServiceName() may not be available on some grpc versions + .setTag( + Tags.RPC_SERVICE, + RPC_SERVICE_CACHE.computeIfAbsent( + method.getFullMethodName(), MethodDescriptor::extractFullServiceName)); + span.setResourceName(method.getFullMethodName()); + return afterStart(span); + } + + public AgentSpan onClose(final AgentSpan span, final Status status) { + + span.setTag("status.code", status.getCode().name()); + span.setTag("status.description", status.getDescription()); + + // TODO why is there a mismatch between client / server for calling the onError method? 
+ onError(span, status.getCause()); + if (CLIENT_ERROR_STATUSES.get(status.getCode().value())) { + span.setError(true); + } + + return span; + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcInjectAdapter.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcInjectAdapter.java new file mode 100644 index 00000000000..829035ae383 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcInjectAdapter.java @@ -0,0 +1,14 @@ +package datadog.trace.instrumentation.armeria.grpc.client; + +import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; +import io.grpc.Metadata; + +public final class GrpcInjectAdapter implements AgentPropagation.Setter { + + public static final GrpcInjectAdapter SETTER = new GrpcInjectAdapter(); + + @Override + public void set(final Metadata carrier, final String key, final String value) { + carrier.put(Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER), value); + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcExtractAdapter.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcExtractAdapter.java new file mode 100644 index 00000000000..e358e7a0632 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcExtractAdapter.java @@ -0,0 +1,21 @@ +package datadog.trace.instrumentation.armeria.grpc.server; + +import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; +import io.grpc.Metadata; + +public final class GrpcExtractAdapter implements AgentPropagation.ContextVisitor { + + public static final GrpcExtractAdapter GETTER = new GrpcExtractAdapter(); + + @Override + public void forEachKey(Metadata carrier, AgentPropagation.KeyClassifier classifier) { + for (String key : carrier.keys()) { + if (!key.endsWith(Metadata.BINARY_HEADER_SUFFIX) && !key.startsWith(":")) { + if (!classifier.accept( + key, carrier.get(Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER)))) { + return; + } + } + } + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java new file mode 100644 index 00000000000..de5058948e2 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java @@ -0,0 +1,111 @@ +package datadog.trace.instrumentation.armeria.grpc.server; + +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; +import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; + +import datadog.trace.api.Config; +import datadog.trace.api.cache.DDCache; +import datadog.trace.api.cache.DDCaches; +import datadog.trace.api.naming.SpanNaming; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; +import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; +import datadog.trace.bootstrap.instrumentation.decorator.ServerDecorator; +import 
io.grpc.ServerCall; +import io.grpc.Status; +import java.util.BitSet; +import java.util.LinkedHashMap; +import java.util.function.Function; + +public class GrpcServerDecorator extends ServerDecorator { + + private static final boolean TRIM_RESOURCE_PACKAGE_NAME = + Config.get().isGrpcServerTrimPackageResource(); + private static final BitSet SERVER_ERROR_STATUSES = Config.get().getGrpcServerErrorStatuses(); + + public static final CharSequence GRPC_SERVER = + UTF8BytesString.create( + SpanNaming.instance().namingSchema().server().operationForProtocol("grpc")); + public static final CharSequence COMPONENT_NAME = UTF8BytesString.create("armeria-grpc-server"); + public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); + + private static final LinkedHashMap createServerPathwaySortedTags() { + LinkedHashMap result = new LinkedHashMap<>(); + result.put(DIRECTION_TAG, DIRECTION_IN); + result.put(TYPE_TAG, "grpc"); + return result; + } + + public static final LinkedHashMap SERVER_PATHWAY_EDGE_TAGS = + createServerPathwaySortedTags(); + public static final GrpcServerDecorator DECORATE = new GrpcServerDecorator(); + + private static final Function NORMALIZE = + // Uses inner class for predictable name for Instrumenter.Default.helperClassNames() + new Function() { + @Override + public String apply(String fullName) { + int index = fullName.lastIndexOf("."); + if (index > 0) { + return fullName.substring(index + 1); + } else { + return fullName; + } + } + }; + + private final DDCache cachedResourceNames; + + public GrpcServerDecorator() { + if (TRIM_RESOURCE_PACKAGE_NAME) { + cachedResourceNames = DDCaches.newFixedSizeCache(512); + } else { + cachedResourceNames = null; + } + } + + @Override + protected String[] instrumentationNames() { + return new String[] {"armeria-grpc-server", "armeria-grpc", "armeria", "grpc-server", "grpc"}; + } + + @Override + protected CharSequence spanType() { + return InternalSpanTypes.RPC; + } + + @Override + protected CharSequence component() { + return COMPONENT_NAME; + } + + @Override + public AgentSpan afterStart(final AgentSpan span) { + span.setMeasured(true); + return super.afterStart(span); + } + + public AgentSpan onCall(final AgentSpan span, ServerCall call) { + if (TRIM_RESOURCE_PACKAGE_NAME) { + span.setResourceName( + cachedResourceNames.computeIfAbsent( + call.getMethodDescriptor().getFullMethodName(), NORMALIZE)); + } else { + span.setResourceName(call.getMethodDescriptor().getFullMethodName()); + } + return span; + } + + public AgentSpan onClose(final AgentSpan span, final Status status) { + span.setTag("status.code", status.getCode().name()); + span.setTag("status.description", status.getDescription()); + + if (SERVER_ERROR_STATUSES.get(status.getCode().value())) { + onError(span, status.getCause()); + span.setError(true); + } + + return span; + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/HandlerRegistryBuilderInstrumentation.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/HandlerRegistryBuilderInstrumentation.java new file mode 100644 index 00000000000..521c54d83d5 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/HandlerRegistryBuilderInstrumentation.java @@ -0,0 +1,70 @@ +package datadog.trace.instrumentation.armeria.grpc.server; + +import static 
datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; + +import com.google.auto.service.AutoService; +import datadog.trace.agent.tooling.Instrumenter; +import datadog.trace.agent.tooling.muzzle.Reference; +import io.grpc.ServerInterceptors; +import io.grpc.ServerServiceDefinition; +import net.bytebuddy.asm.Advice; + +@AutoService(Instrumenter.class) +public class HandlerRegistryBuilderInstrumentation extends Instrumenter.Tracing + implements Instrumenter.ForSingleType { + public HandlerRegistryBuilderInstrumentation() { + super("armeria-grpc-server", "armeria-grpc", "armeria", "grpc-server", "grpc"); + } + + @Override + public String instrumentedType() { + return "com.linecorp.armeria.server.grpc.HandlerRegistry$Builder"; + } + + @Override + public Reference[] additionalMuzzleReferences() { + return new Reference[] { + new Reference( + new String[0], + 1, + "com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer", + null, + new String[0], + new Reference.Field[0], + new Reference.Method[0]) + }; + } + + @Override + public String[] helperClassNames() { + return new String[] { + packageName + ".GrpcServerDecorator", + packageName + ".GrpcServerDecorator$1", + packageName + ".GrpcExtractAdapter", + packageName + ".TracingServerInterceptor", + packageName + ".TracingServerInterceptor$TracingServerCall", + packageName + ".TracingServerInterceptor$TracingServerCallListener", + }; + } + + @Override + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice( + isMethod() + .and(named("addService")) + .and(takesArgument(0, named("io.grpc.ServerServiceDefinition"))), + getClass().getName() + "$AddService"); + } + + public static final class AddService { + @Advice.OnMethodEnter + public static void before( + @Advice.Argument(value = 0, readOnly = false) + ServerServiceDefinition serverServiceDefinition) { + serverServiceDefinition = + ServerInterceptors.intercept(serverServiceDefinition, TracingServerInterceptor.INSTANCE); + } + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/TracingServerInterceptor.java b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/TracingServerInterceptor.java new file mode 100644 index 00000000000..158c7c72d01 --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/TracingServerInterceptor.java @@ -0,0 +1,349 @@ +package datadog.trace.instrumentation.armeria.grpc.server; + +import static datadog.trace.api.gateway.Events.EVENTS; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.propagate; +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; +import static datadog.trace.instrumentation.armeria.grpc.server.GrpcExtractAdapter.GETTER; +import static datadog.trace.instrumentation.armeria.grpc.server.GrpcServerDecorator.DECORATE; +import static datadog.trace.instrumentation.armeria.grpc.server.GrpcServerDecorator.GRPC_MESSAGE; +import static datadog.trace.instrumentation.armeria.grpc.server.GrpcServerDecorator.GRPC_SERVER; +import static datadog.trace.instrumentation.armeria.grpc.server.GrpcServerDecorator.SERVER_PATHWAY_EDGE_TAGS; + +import 
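
The AddService advice above rewrites the first argument of HandlerRegistry.Builder#addService, so every service registered through Armeria's gRPC integration is wrapped with the tracing interceptor before any call is dispatched. Outside the agent the equivalent wiring is a one-liner; the helper class below is illustrative only.

import datadog.trace.instrumentation.armeria.grpc.server.TracingServerInterceptor;
import io.grpc.ServerInterceptors;
import io.grpc.ServerServiceDefinition;

final class InterceptionSketch {
  // Same effect as the advice: every call to the returned definition passes through
  // TracingServerInterceptor first, then the original handlers.
  static ServerServiceDefinition traced(ServerServiceDefinition definition) {
    return ServerInterceptors.intercept(definition, TracingServerInterceptor.INSTANCE);
  }
}
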
datadog.trace.api.Config; +import datadog.trace.api.function.TriConsumer; +import datadog.trace.api.function.TriFunction; +import datadog.trace.api.gateway.CallbackProvider; +import datadog.trace.api.gateway.Flow; +import datadog.trace.api.gateway.IGSpanInfo; +import datadog.trace.api.gateway.RequestContext; +import datadog.trace.api.gateway.RequestContextSlot; +import datadog.trace.bootstrap.instrumentation.api.AgentScope; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan.Context; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import datadog.trace.bootstrap.instrumentation.api.TagContext; +import io.grpc.ForwardingServerCall; +import io.grpc.ForwardingServerCallListener; +import io.grpc.Grpc; +import io.grpc.Metadata; +import io.grpc.ServerCall; +import io.grpc.ServerCallHandler; +import io.grpc.ServerInterceptor; +import io.grpc.Status; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.util.Set; +import java.util.concurrent.CancellationException; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.function.Supplier; +import javax.annotation.Nonnull; + +public class TracingServerInterceptor implements ServerInterceptor { + + public static final TracingServerInterceptor INSTANCE = new TracingServerInterceptor(); + private static final Set IGNORED_METHODS = Config.get().getGrpcIgnoredInboundMethods(); + + private TracingServerInterceptor() {} + + protected static AgentTracer.TracerAPI tracer() { + return AgentTracer.get(); + } + + @Override + public ServerCall.Listener interceptCall( + final ServerCall call, + final Metadata headers, + final ServerCallHandler next) { + if (IGNORED_METHODS.contains(call.getMethodDescriptor().getFullMethodName())) { + return next.startCall(call, headers); + } + + Context spanContext = propagate().extract(headers, GETTER); + AgentTracer.TracerAPI tracer = tracer(); + spanContext = callIGCallbackRequestStarted(tracer, spanContext); + + CallbackProvider cbp = tracer.getCallbackProvider(RequestContextSlot.APPSEC); + final AgentSpan span = + startSpan(DECORATE.instrumentationNames()[0], GRPC_SERVER, spanContext).setMeasured(true); + + AgentTracer.get() + .getDataStreamsMonitoring() + .setCheckpoint(span, SERVER_PATHWAY_EDGE_TAGS, 0, 0); + + RequestContext reqContext = span.getRequestContext(); + if (reqContext != null) { + callIGCallbackClientAddress(cbp, reqContext, call); + callIGCallbackHeaders(cbp, reqContext, headers); + } + + DECORATE.afterStart(span); + DECORATE.onCall(span, call); + + final ServerCall.Listener result; + try (AgentScope scope = activateSpan(span)) { + // Wrap the server call so that we can decorate the span + // with the resulting status + final TracingServerCall tracingServerCall = new TracingServerCall<>(span, call); + // call other interceptors + result = next.startCall(tracingServerCall, headers); + } catch (final Throwable e) { + if (span.phasedFinish()) { + DECORATE.onError(span, e); + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + throw e; + } + + // This ensures the server implementation can see the span in scope + return new TracingServerCallListener<>(span, result); + } + + static final class TracingServerCall + extends ForwardingServerCall.SimpleForwardingServerCall { + final AgentSpan span; + + TracingServerCall(final AgentSpan span, final ServerCall delegate) { + super(delegate); + this.span = span; + } + + @Override + 
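
interceptCall short-circuits for methods listed in the ignored-inbound-methods setting, so those calls produce no server span at all. The test suites below exercise this through injectSysConfig; expressed as plain system properties (values copied from those tests, set before the tracer's Config is read, in a hypothetical helper):

final class IgnoredMethodsConfigSketch {
  static void apply() {
    // Skip server-side tracing for one method, identified by its full "service/Method" name.
    System.setProperty("dd.trace.grpc.ignored.inbound.methods", "example.Greeter/IgnoreInbound");
    // Client-side analogue used by the same tests.
    System.setProperty("dd.trace.grpc.ignored.outbound.methods", "example.Greeter/Ignore");
  }
}
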
public void close(final Status status, final Metadata trailers) { + DECORATE.onClose(span, status); + try (final AgentScope scope = activateSpan(span)) { + delegate().close(status, trailers); + } catch (final Throwable e) { + DECORATE.onError(span, e); + throw e; + } finally { + if (span.phasedFinish()) { + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + } + } + } + + public static final class TracingServerCallListener + extends ForwardingServerCallListener.SimpleForwardingServerCallListener { + private final AgentSpan span; + + TracingServerCallListener(final AgentSpan span, final ServerCall.Listener delegate) { + super(delegate); + this.span = span; + } + + @Override + public void onMessage(final ReqT message) { + final AgentSpan msgSpan = + startSpan(DECORATE.instrumentationNames()[0], GRPC_MESSAGE, this.span.context()) + .setTag("message.type", message.getClass().getName()); + DECORATE.afterStart(msgSpan); + try (AgentScope scope = activateSpan(msgSpan)) { + callIGCallbackGrpcMessage(msgSpan, message); + delegate().onMessage(message); + } catch (final Throwable e) { + // I'm not convinced we should actually be finishing the span here... + if (span.phasedFinish()) { + DECORATE.onError(msgSpan, e); + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + throw e; + } finally { + DECORATE.beforeFinish(msgSpan); + msgSpan.finish(); + } + } + + @Override + public void onHalfClose() { + try (final AgentScope scope = activateSpan(span)) { + delegate().onHalfClose(); + } catch (final Throwable e) { + if (span.phasedFinish()) { + DECORATE.onError(span, e); + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + throw e; + } + } + + @Override + public void onCancel() { + // Finishes span. + try (final AgentScope scope = activateSpan(span)) { + delegate().onCancel(); + span.setTag("canceled", true); + } catch (CancellationException e) { + // No need to report an exception or mark as error that it was canceled. + throw e; + } catch (final Throwable e) { + DECORATE.onError(span, e); + throw e; + } finally { + if (span.phasedFinish()) { + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + } + } + + @Override + public void onComplete() { + // Finishes span. + try (final AgentScope scope = activateSpan(span)) { + delegate().onComplete(); + } catch (final Throwable e) { + DECORATE.onError(span, e); + throw e; + } finally { + /** + * grpc has quite a few states that can finish the span. rather than track down the correct + * combination of them to exclusively finish the span, use phasedFinish. 
+ */ + if (span.phasedFinish()) { + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + } + } + + @Override + public void onReady() { + try (final AgentScope scope = activateSpan(span)) { + delegate().onReady(); + } catch (final Throwable e) { + if (span.phasedFinish()) { + DECORATE.onError(span, e); + DECORATE.beforeFinish(span); + callIGCallbackRequestEnded(span); + span.publish(); + } + throw e; + } + } + } + + // IG helpers follow + + private static Context callIGCallbackRequestStarted(AgentTracer.TracerAPI cbp, Context context) { + Supplier> startedCbAppSec = + cbp.getCallbackProvider(RequestContextSlot.APPSEC).getCallback(EVENTS.requestStarted()); + Supplier> startedCbIast = + cbp.getCallbackProvider(RequestContextSlot.IAST).getCallback(EVENTS.requestStarted()); + + if (startedCbAppSec == null && startedCbIast == null) { + return context; + } + + TagContext tagContext = null; + if (context == null) { + tagContext = new TagContext(); + } else if (context instanceof TagContext) { + tagContext = (TagContext) context; + } + if (tagContext != null) { + if (startedCbAppSec != null) { + Flow flowAppSec = startedCbAppSec.get(); + tagContext.withRequestContextDataAppSec(flowAppSec.getResult()); + } + if (startedCbIast != null) { + Flow flowIast = startedCbIast.get(); + tagContext.withRequestContextDataIast(flowIast.getResult()); + } + return tagContext; + } + + return context; + } + + private static void callIGCallbackClientAddress( + CallbackProvider cbp, RequestContext ctx, ServerCall call) { + SocketAddress socketAddress = call.getAttributes().get(Grpc.TRANSPORT_ATTR_REMOTE_ADDR); + TriFunction> cb = + cbp.getCallback(EVENTS.requestClientSocketAddress()); + if (socketAddress == null || !(socketAddress instanceof InetSocketAddress) || cb == null) { + return; + } + + InetSocketAddress inetSockAddr = (InetSocketAddress) socketAddress; + cb.apply(ctx, inetSockAddr.getHostString(), inetSockAddr.getPort()); + } + + private static void callIGCallbackHeaders( + CallbackProvider cbp, RequestContext reqCtx, Metadata metadata) { + TriConsumer headerCb = cbp.getCallback(EVENTS.requestHeader()); + Function> headerEndCb = cbp.getCallback(EVENTS.requestHeaderDone()); + if (headerCb == null || headerEndCb == null) { + return; + } + for (String key : metadata.keys()) { + if (!key.endsWith(Metadata.BINARY_HEADER_SUFFIX) && !key.startsWith(":")) { + Metadata.Key mdKey = Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER); + for (String value : metadata.getAll(mdKey)) { + headerCb.accept(reqCtx, key, value); + } + } + } + + headerEndCb.apply(reqCtx); + } + + private static void callIGCallbackRequestEnded(@Nonnull final AgentSpan span) { + CallbackProvider cbp = tracer().getUniversalCallbackProvider(); + if (cbp == null) { + return; + } + RequestContext requestContext = span.getRequestContext(); + if (requestContext != null) { + BiFunction> callback = + cbp.getCallback(EVENTS.requestEnded()); + if (callback != null) { + callback.apply(requestContext, span); + } + } + } + + private static void callIGCallbackGrpcMessage(@Nonnull final AgentSpan span, Object obj) { + if (obj == null) { + return; + } + + CallbackProvider cbpAppsec = tracer().getCallbackProvider(RequestContextSlot.APPSEC); + CallbackProvider cbpIast = tracer().getCallbackProvider(RequestContextSlot.IAST); + if (cbpAppsec == null && cbpIast == null) { + return; + } + RequestContext requestContext = span.getRequestContext(); + if (requestContext == null) { + return; + } + + if (cbpAppsec != null) { + 
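
Several listener callbacks (close, onCancel, onComplete, and the error paths above) can each be the last thing to happen to a request, so they all guard completion with phasedFinish(): exactly one caller wins, runs the finish-time decoration and the requestEnded gateway callback, and publishes the span; later callers are no-ops. A condensed sketch of that pattern, with a hypothetical helper name and the gateway callback elided:

import static datadog.trace.instrumentation.armeria.grpc.server.GrpcServerDecorator.DECORATE;

import datadog.trace.bootstrap.instrumentation.api.AgentSpan;

final class FinishOnceSketch {
  // phasedFinish() moves the span to "finished but not yet published" and returns true
  // only for the first caller; publish() then emits it exactly once.
  static void finishOnce(AgentSpan span) {
    if (span.phasedFinish()) {
      DECORATE.beforeFinish(span);
      span.publish();
    }
  }
}
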
BiFunction> callback = + cbpAppsec.getCallback(EVENTS.grpcServerRequestMessage()); + if (callback != null) { + callback.apply(requestContext, obj); + } + } + + if (cbpIast != null) { + BiFunction> callback = + cbpIast.getCallback(EVENTS.grpcServerRequestMessage()); + if (callback != null) { + callback.apply(requestContext, obj); + } + } + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/test/groovy/ArmeriaGrpcStreamingTest.groovy b/dd-java-agent/instrumentation/armeria-grpc/src/test/groovy/ArmeriaGrpcStreamingTest.groovy new file mode 100644 index 00000000000..f07e6bdbd9d --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/test/groovy/ArmeriaGrpcStreamingTest.groovy @@ -0,0 +1,283 @@ +import com.linecorp.armeria.client.Clients +import com.linecorp.armeria.common.SessionProtocol +import com.linecorp.armeria.common.grpc.GrpcSerializationFormats +import com.linecorp.armeria.server.Server +import com.linecorp.armeria.server.ServerBuilder +import com.linecorp.armeria.server.grpc.GrpcService +import com.linecorp.armeria.testing.junit4.server.ServerRule +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.api.DDSpanTypes +import datadog.trace.bootstrap.instrumentation.api.Tags +import example.GreeterGrpc +import example.Helloworld +import io.grpc.stub.StreamObserver + +import java.time.Duration +import java.util.concurrent.CopyOnWriteArrayList +import java.util.concurrent.atomic.AtomicReference + +abstract class ArmeriaGrpcStreamingTest extends VersionedNamingTestBase { + + @Override + final String service() { + return null + } + + @Override + final String operation() { + return null + } + + final Duration timeoutDuration() { + return Duration.ofSeconds(5) + } + + protected abstract String clientOperation() + + protected abstract String serverOperation() + + @Override + boolean useStrictTraceWrites() { + false + } + + @Override + protected void configurePreAgent() { + super.configurePreAgent() + injectSysConfig("dd.trace.grpc.ignored.inbound.methods", "example.Greeter/IgnoreInbound") + injectSysConfig("dd.trace.grpc.ignored.outbound.methods", "example.Greeter/Ignore") + // here to trigger wrapping to record scheduling time - the logic is trivial so it's enough to verify + // that ClassCastExceptions do not arise from the wrapping + injectSysConfig("dd.profiling.enabled", "true") + } + + def "test conversation #name"() { + setup: + + def msgCount = serverMessageCount + def serverReceived = new CopyOnWriteArrayList<>() + def clientReceived = new CopyOnWriteArrayList<>() + def error = new AtomicReference() + + ServerRule serverRule = new ServerRule() { + @Override + protected void configure(ServerBuilder sb) throws Exception { + sb.service(GrpcService.builder().addService(new GreeterGrpc.GreeterImplBase() { + @Override + StreamObserver conversation(StreamObserver observer) { + return new StreamObserver() { + @Override + void onNext(Helloworld.Response value) { + + serverReceived << value.message + + (1..msgCount).each { + if (TEST_TRACER.activeScope().isAsyncPropagating()) { + observer.onNext(value) + } else { + observer.onError(new IllegalStateException("not async propagating!")) + } + } + } + + @Override + void onError(Throwable t) { + if (TEST_TRACER.activeScope().isAsyncPropagating()) { + error.set(t) + observer.onError(t) + } else { + observer.onError(new IllegalStateException("not async propagating!")) + } + } + + @Override + void onCompleted() { + if (TEST_TRACER.activeScope().isAsyncPropagating()) { + observer.onCompleted() + } 
else { + observer.onError(new IllegalStateException("not async propagating!")) + } + } + } + } + }).build()) + } + } + serverRule.configure(Server.builder().requestTimeout(timeoutDuration())) + serverRule.start() + + GreeterGrpc.GreeterStub client = Clients.builder(serverRule.uri(SessionProtocol.HTTP, GrpcSerializationFormats.PROTO)) + .writeTimeout(timeoutDuration()) + .responseTimeout(timeoutDuration()) + .build(GreeterGrpc.GreeterStub) + + when: + def streamObserver = client.conversation(new StreamObserver() { + @Override + void onNext(Helloworld.Response value) { + if (TEST_TRACER.activeScope().isAsyncPropagating()) { + clientReceived << value.message + } else { + error.set(new IllegalStateException("not async propagating!")) + } + } + + @Override + void onError(Throwable t) { + if (TEST_TRACER.activeScope().isAsyncPropagating()) { + error.set(t) + } else { + error.set(new IllegalStateException("not async propagating!")) + } + } + + @Override + void onCompleted() { + if (!TEST_TRACER.activeScope().isAsyncPropagating()) { + error.set(new IllegalStateException("not async propagating!")) + } + } + }) + + clientRange.each { + def message = Helloworld.Response.newBuilder().setMessage("call $it").build() + streamObserver.onNext(message) + } + streamObserver.onCompleted() + + then: + error.get() == null + TEST_WRITER.waitForTraces(2) + error.get() == null + serverReceived == clientRange.collect { "call $it" } + clientReceived == serverRange.collect { + clientRange.collect { + "call $it" + } + }.flatten().sort() + + assertTraces(2) { + trace((clientMessageCount * serverMessageCount) + 1) { + sortSpansByStart() + span { + operationName clientOperation() + resourceName "example.Greeter/Conversation" + spanType DDSpanTypes.RPC + parent() + errored false + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.RPC_SERVICE" "example.Greeter" + "status.code" "OK" + "request.type" "example.Helloworld\$Response" + "response.type" "example.Helloworld\$Response" + peerServiceFrom(Tags.RPC_SERVICE) + defaultTags() + } + } + (1..(clientMessageCount * serverMessageCount)).each { + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "message.type" "example.Helloworld\$Response" + defaultTagsNoPeerService() + } + } + } + } + trace(clientMessageCount + 1) { + sortSpansByStart() + span { + operationName serverOperation() + resourceName "example.Greeter/Conversation" + spanType DDSpanTypes.RPC + childOf trace(0).get(0) + errored false + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "status.code" "OK" + defaultTags(true) + } + } + clientRange.each { + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "message.type" "example.Helloworld\$Response" + defaultTags() + } + } + } + } + } + + cleanup: + serverRule.stop() + + where: + name | clientMessageCount | serverMessageCount + "A" | 1 | 1 + "B" | 2 | 1 + "C" | 1 | 2 + "D" | 2 | 2 + "E" | 3 | 3 + "A" | 1 | 1 + "B" | 2 | 1 + "C" | 1 | 2 + "D" | 2 | 2 + "E" | 3 | 3 + + clientRange = 1..clientMessageCount + serverRange = 1..serverMessageCount + } +} + +class ArmeriaGrpcStreamingV0ForkedTest extends ArmeriaGrpcStreamingTest { + + @Override 
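
To make the streaming assertions concrete: the server echoes each client message serverMessageCount times, so the client trace holds one grpc.client span plus clientMessageCount * serverMessageCount grpc.message spans (one per response received), while the server trace holds one grpc.server span plus clientMessageCount grpc.message spans (one per message received from the client). For row "D" (2 client messages, 2 replies each) that is 2 * 2 + 1 = 5 spans in the client trace and 2 + 1 = 3 spans in the server trace, matching trace((clientMessageCount * serverMessageCount) + 1) and trace(clientMessageCount + 1) above.
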
+ int version() { + return 0 + } + + @Override + protected String clientOperation() { + return "grpc.client" + } + + @Override + protected String serverOperation() { + return "grpc.server" + } +} + +class ArmeriaGrpcStreamingV1ForkedTest extends ArmeriaGrpcStreamingTest { + + @Override + int version() { + return 1 + } + + @Override + protected String clientOperation() { + return "grpc.client.request" + } + + @Override + protected String serverOperation() { + return "grpc.server.request" + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/test/groovy/ArmeriaGrpcTest.groovy b/dd-java-agent/instrumentation/armeria-grpc/src/test/groovy/ArmeriaGrpcTest.groovy new file mode 100644 index 00000000000..4f93259bedb --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/test/groovy/ArmeriaGrpcTest.groovy @@ -0,0 +1,730 @@ +import com.google.common.util.concurrent.ListenableFuture +import com.linecorp.armeria.client.Clients +import com.linecorp.armeria.common.SessionProtocol +import com.linecorp.armeria.common.grpc.GrpcSerializationFormats +import com.linecorp.armeria.server.Server +import com.linecorp.armeria.server.ServerBuilder +import com.linecorp.armeria.server.grpc.GrpcService +import com.linecorp.armeria.testing.junit4.server.ServerRule +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.api.DDSpanId +import datadog.trace.api.DDSpanTypes +import datadog.trace.api.DDTags +import datadog.trace.api.function.TriConsumer +import datadog.trace.api.gateway.Flow +import datadog.trace.api.gateway.RequestContext +import datadog.trace.api.gateway.RequestContextSlot +import datadog.trace.bootstrap.instrumentation.api.AgentPropagation +import datadog.trace.bootstrap.instrumentation.api.AgentTracer +import datadog.trace.bootstrap.instrumentation.api.Tags +import datadog.trace.core.datastreams.StatsGroup +import datadog.trace.instrumentation.armeria.grpc.server.GrpcExtractAdapter +import example.GreeterGrpc +import example.Helloworld +import io.grpc.Metadata +import io.grpc.Status +import io.grpc.StatusRuntimeException +import io.grpc.stub.StreamObserver +import spock.lang.Shared + +import java.time.Duration +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors +import java.util.function.BiFunction +import java.util.function.Function +import java.util.function.Supplier + +import static datadog.trace.agent.test.utils.TraceUtils.basicSpan +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace +import static datadog.trace.api.gateway.Events.EVENTS + +abstract class ArmeriaGrpcTest extends VersionedNamingTestBase { + + @Shared + def ig + + def collectedAppSecHeaders = [:] + boolean appSecHeaderDone = false + def collectedAppSecReqMsgs = [] + + final Duration timeoutDuration() { + return Duration.ofSeconds(5) + } + + @Override + final String service() { + return null + } + + @Override + final String operation() { + return null + } + + protected abstract String clientOperation() + + protected abstract String serverOperation() + + @Override + protected void configurePreAgent() { + super.configurePreAgent() + injectSysConfig("dd.trace.grpc.ignored.inbound.methods", "example.Greeter/IgnoreInbound") + injectSysConfig("dd.trace.grpc.ignored.outbound.methods", "example.Greeter/Ignore") + // here to trigger wrapping to record scheduling time - the logic is trivial so it's enough to verify + // that ClassCastExceptions do not arise from the wrapping + injectSysConfig("dd.profiling.enabled", "true") + } + + @Override + 
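
The V0/V1 pairs here (and the matching classes at the end of ArmeriaGrpcTest below) pin down the two naming schemes: the legacy schema uses grpc.client and grpc.server as operation names, while schema v1 uses grpc.client.request and grpc.server.request. The production code never hard-codes these strings; it asks SpanNaming.instance().namingSchema() for the operation, as the server decorator above does.
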
boolean useStrictTraceWrites() { + false + } + + def setupSpec() { + ig = AgentTracer.get().getCallbackProvider(RequestContextSlot.APPSEC) + } + + def setup() { + ig.registerCallback(EVENTS.requestStarted(), { -> new Flow.ResultFlow(new Object()) } as Supplier) + ig.registerCallback(EVENTS.requestHeader(), { reqCtx, name, value -> + collectedAppSecHeaders[name] = value + } as TriConsumer) + ig.registerCallback(EVENTS.requestHeaderDone(), { + appSecHeaderDone = true + Flow.ResultFlow.empty() + } as Function>) + ig.registerCallback(EVENTS.grpcServerRequestMessage(), { reqCtx, obj -> + collectedAppSecReqMsgs << obj + Flow.ResultFlow.empty() + } as BiFunction>) + } + + def cleanup() { + ig.reset() + } + + def "test request-response"() { + setup: + ExecutorService responseExecutor = Executors.newSingleThreadExecutor() + ServerRule serverRule = new ServerRule() { + @Override + protected void configure(ServerBuilder sb) throws Exception { + sb.service(GrpcService.builder().addService(new GreeterGrpc.GreeterImplBase() { + @Override + void sayHello( + final Helloworld.Request req, final StreamObserver responseObserver) { + final Helloworld.Response reply = Helloworld.Response.newBuilder().setMessage("Hello $req.name").build() + responseExecutor.execute { + if (TEST_TRACER.activeSpan() == null) { + responseObserver.onError(new IllegalStateException("no active span")) + } else { + responseObserver.onNext(reply) + responseObserver.onCompleted() + } + } + } + }).build()) + } + } + serverRule.configure(Server.builder().requestTimeout(timeoutDuration())) + serverRule.start() + + GreeterGrpc.GreeterFutureStub client = Clients.builder(serverRule.uri(SessionProtocol.HTTP, GrpcSerializationFormats.PROTO)) + .writeTimeout(timeoutDuration()) + .responseTimeout(timeoutDuration()) + .build(GreeterGrpc.GreeterFutureStub) + + + def response = null + + when: + runUnderTrace("parent") { + ListenableFuture responseListenableFuture = client.sayHello(Helloworld.Request.newBuilder().setName(name).build()) + response = responseListenableFuture.get() + } + // wait here to make checkpoint asserts deterministic + TEST_WRITER.waitForTraces(2) + if (isDataStreamsEnabled()) { + TEST_DATA_STREAMS_WRITER.waitForGroups(2) + } + + then: + response.message == "Hello $name" + assertTraces(2) { + sortSpansByStart() + trace(3) { + basicSpan(it, "parent") + span { + operationName clientOperation() + resourceName "example.Greeter/SayHello" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.RPC_SERVICE" "example.Greeter" + "status.code" "OK" + "request.type" "example.Helloworld\$Request" + "response.type" "example.Helloworld\$Response" + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + peerServiceFrom(Tags.RPC_SERVICE) + defaultTags() + } + } + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(1) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "message.type" "example.Helloworld\$Response" + defaultTagsNoPeerService() + } + } + } + trace(2) { + span { + operationName serverOperation() + resourceName "example.Greeter/SayHello" + spanType DDSpanTypes.RPC + childOf trace(0).get(1) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "status.code" "OK" + if ({ 
isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + defaultTags(true) + } + } + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "message.type" "example.Helloworld\$Request" + defaultTags() + } + } + } + } + + and: + def traceId = TEST_WRITER[0].traceId.first() + traceId.toLong() as String == collectedAppSecHeaders['x-datadog-trace-id'] + collectedAppSecReqMsgs.size() == 1 + collectedAppSecReqMsgs.first().name == name + + and: + if (isDataStreamsEnabled()) { + StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } + verifyAll(first) { + edgeTags.containsAll(["direction:out", "type:grpc"]) + edgeTags.size() == 2 + } + + StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } + verifyAll(second) { + edgeTags.containsAll(["direction:in", "type:grpc"]) + edgeTags.size() == 2 + } + } + + cleanup: + serverRule.stop().get() + + where: + name << ["some name", "some other name"] + } + + def "test error - #name"() { + setup: + def error = status.asException() + ServerRule serverRule = new ServerRule() { + @Override + protected void configure(ServerBuilder sb) throws Exception { + sb.service(GrpcService.builder().addService(new GreeterGrpc.GreeterImplBase() { + @Override + void sayHello( + final Helloworld.Request req, final StreamObserver responseObserver) { + responseObserver.onError(error) + } + }).build()) + } + } + serverRule.configure(Server.builder().requestTimeout(timeoutDuration())) + serverRule.start() + + GreeterGrpc.GreeterBlockingStub client = Clients.builder(serverRule.uri(SessionProtocol.HTTP, GrpcSerializationFormats.PROTO)) + .writeTimeout(timeoutDuration()) + .responseTimeout(timeoutDuration()) + .build(GreeterGrpc.GreeterBlockingStub) + + when: + client.sayHello(Helloworld.Request.newBuilder().setName(name).build()) + // wait here to make checkpoint asserts deterministic + TEST_WRITER.waitForTraces(2) + + then: + thrown StatusRuntimeException + + assertTraces(2) { + sortSpansByStart() + trace(1) { + span { + operationName clientOperation() + resourceName "example.Greeter/SayHello" + spanType DDSpanTypes.RPC + parent() + errored true + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.RPC_SERVICE" "example.Greeter" + "status.code" "${status.code.name()}" + "status.description" description + "request.type" "example.Helloworld\$Request" + "response.type" "example.Helloworld\$Response" + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + peerServiceFrom(Tags.RPC_SERVICE) + defaultTags() + } + } + } + trace(2) { + span { + operationName serverOperation() + resourceName "example.Greeter/SayHello" + spanType DDSpanTypes.RPC + childOf trace(0).get(0) + errored true + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "status.code" "${status.code.name()}" + "status.description" description + "canceled" { true } // 1.0.0 handles cancellation incorrectly so accesting any value + if (status.cause != null) { + errorTags status.cause.class, status.cause.message + } + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + defaultTags(true) + } + } + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + 
errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "message.type" "example.Helloworld\$Request" + defaultTags() + } + } + } + } + + cleanup: + serverRule.stop().get() + + where: + name | status | description + "Runtime - cause" | Status.UNKNOWN.withCause(new RuntimeException("some error")) | null + "Status - cause" | Status.PERMISSION_DENIED.withCause(new RuntimeException("some error")) | null + "StatusRuntime - cause" | Status.UNIMPLEMENTED.withCause(new RuntimeException("some error")) | null + "Runtime - description" | Status.UNKNOWN.withDescription("some description") | "some description" + "Status - description" | Status.PERMISSION_DENIED.withDescription("some description") | "some description" + "StatusRuntime - description" | Status.UNIMPLEMENTED.withDescription("some description") | "some description" + } + + def "test error thrown - #name"() { + setup: + + def error = status.asRuntimeException() + ServerRule serverRule = new ServerRule() { + @Override + protected void configure(ServerBuilder sb) throws Exception { + sb.service(GrpcService.builder().addService(new GreeterGrpc.GreeterImplBase() { + @Override + void sayHello( + final Helloworld.Request req, final StreamObserver responseObserver) { + throw error + } + }).build()) + } + } + serverRule.configure(Server.builder().requestTimeout(timeoutDuration())) + serverRule.start() + + GreeterGrpc.GreeterBlockingStub client = Clients.builder(serverRule.uri(SessionProtocol.HTTP, GrpcSerializationFormats.PROTO)) + .writeTimeout(timeoutDuration()) + .responseTimeout(timeoutDuration()) + .build(GreeterGrpc.GreeterBlockingStub) + + when: + client.sayHello(Helloworld.Request.newBuilder().setName(name).build()) + // wait here to make checkpoint asserts deterministic + TEST_WRITER.waitForTraces(2) + + then: + thrown StatusRuntimeException + + assertTraces(2) { + sortSpansByStart() + trace(1) { + span { + operationName clientOperation() + resourceName "example.Greeter/SayHello" + spanType DDSpanTypes.RPC + parent() + errored true + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.RPC_SERVICE" "example.Greeter" + "status.code" status.code.name() + if (status.description != null) { + "status.description" status.description + } + "request.type" "example.Helloworld\$Request" + "response.type" "example.Helloworld\$Response" + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + peerServiceFrom(Tags.RPC_SERVICE) + defaultTags() + } + } + } + trace(2) { + span { + operationName serverOperation() + resourceName "example.Greeter/SayHello" + spanType DDSpanTypes.RPC + childOf trace(0).get(0) + errored true + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + errorTags error.class, error.message + "canceled" { true } // 1.0.0 handles cancellation incorrectly so accesting any value + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + defaultTags(true) + } + } + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "message.type" "example.Helloworld\$Request" + defaultTags() + } + } + } + } + + cleanup: + serverRule.stop().get() + + where: + name | status + "Runtime - cause" | Status.UNKNOWN.withCause(new RuntimeException("some 
error")) + "Status - cause" | Status.PERMISSION_DENIED.withCause(new RuntimeException("some error")) + "StatusRuntime - cause" | Status.UNIMPLEMENTED.withCause(new RuntimeException("some error")) + "Runtime - description" | Status.UNKNOWN.withDescription("some description") + "Status - description" | Status.PERMISSION_DENIED.withDescription("some description") + "StatusRuntime - description" | Status.UNIMPLEMENTED.withDescription("some description") + } + + def "skip binary headers"() { + setup: + def meta = new Metadata() + meta.put(Metadata.Key. of("test", Metadata.ASCII_STRING_MARSHALLER), "val") + meta.put(Metadata.Key. of("test-bin", Metadata.BINARY_BYTE_MARSHALLER), "bin-val".bytes) + + when: + def keys = new ArrayList() + GrpcExtractAdapter.GETTER.forEachKey(meta, new AgentPropagation.KeyClassifier() { + + @Override + boolean accept(String key, String value) { + keys.add(key.toLowerCase()) + return true + } + }) + + then: + keys == ["test"] + } + + def "test ignore ignored methods"() { + setup: + + ExecutorService responseExecutor = Executors.newSingleThreadExecutor() + ServerRule serverRule = new ServerRule() { + @Override + protected void configure(ServerBuilder sb) throws Exception { + sb.service(GrpcService.builder().addService(new GreeterGrpc.GreeterImplBase() { + @Override + void ignore( + final Helloworld.Request req, final StreamObserver responseObserver) { + final Helloworld.Response reply = Helloworld.Response.newBuilder().setMessage("Hello $req.name").build() + responseExecutor.execute { + responseObserver.onNext(reply) + responseObserver.onCompleted() + } + } + }).build()) + } + } + serverRule.configure(Server.builder().requestTimeout(timeoutDuration())) + serverRule.start() + + GreeterGrpc.GreeterBlockingStub client = Clients.builder(serverRule.uri(SessionProtocol.HTTP, GrpcSerializationFormats.PROTO)) + .writeTimeout(timeoutDuration()) + .responseTimeout(timeoutDuration()) + .build(GreeterGrpc.GreeterBlockingStub) + + when: + def response = runUnderTrace("parent") { + def resp = client.ignore(Helloworld.Request.newBuilder().setName("whatever").build()) + return resp + } + + then: + response.message == "Hello whatever" + assertTraces(2) { + sortSpansByStart() + trace(1) { + basicSpan(it, "parent") + } + trace(2) { + span { + operationName serverOperation() + resourceName "example.Greeter/Ignore" + spanType DDSpanTypes.RPC + parentSpanId DDSpanId.ZERO + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "status.code" "OK" + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + defaultTags(true) + } + } + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-server" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_SERVER + "message.type" "example.Helloworld\$Request" + defaultTags() + } + } + } + } + + cleanup: + serverRule.stop() + } + + def "test ignore ignored inbound methods"() { + setup: + + ExecutorService responseExecutor = Executors.newSingleThreadExecutor() + ServerRule serverRule = new ServerRule() { + @Override + protected void configure(ServerBuilder sb) throws Exception { + sb.service(GrpcService.builder().addService(new GreeterGrpc.GreeterImplBase() { + @Override + void ignoreInbound( + final Helloworld.Request req, final StreamObserver responseObserver) { + final Helloworld.Response reply = Helloworld.Response.newBuilder().setMessage("Hello 
$req.name").build() + responseExecutor.execute { + responseObserver.onNext(reply) + responseObserver.onCompleted() + } + } + }).build()) + } + } + serverRule.configure(Server.builder().requestTimeout(timeoutDuration())) + serverRule.start() + + GreeterGrpc.GreeterBlockingStub client = Clients.builder(serverRule.uri(SessionProtocol.HTTP, GrpcSerializationFormats.PROTO)) + .writeTimeout(timeoutDuration()) + .responseTimeout(timeoutDuration()) + .build(GreeterGrpc.GreeterBlockingStub) + + when: + def response = client.ignoreInbound(Helloworld.Request.newBuilder().setName("whatever").build()) + + then: + response.message == "Hello whatever" + assertTraces(1) { + sortSpansByStart() + trace(2) { + span { + operationName clientOperation() + resourceName "example.Greeter/IgnoreInbound" + spanType DDSpanTypes.RPC + parent() + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.RPC_SERVICE" "example.Greeter" + "status.code" "OK" + "request.type" "example.Helloworld\$Request" + "response.type" "example.Helloworld\$Response" + if ({ isDataStreamsEnabled() }) { + "$DDTags.PATHWAY_HASH" { String } + } + peerServiceFrom(Tags.RPC_SERVICE) + defaultTags() + } + } + span { + operationName "grpc.message" + resourceName "grpc.message" + spanType DDSpanTypes.RPC + childOf span(0) + errored false + measured true + tags { + "$Tags.COMPONENT" "armeria-grpc-client" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "message.type" "example.Helloworld\$Response" + defaultTagsNoPeerService() + } + } + } + } + + cleanup: + serverRule.stop() + } +} + +abstract class ArmeriaGrpcDataStreamsEnabledForkedTest extends ArmeriaGrpcTest { + @Override + protected void configurePreAgent() { + super.configurePreAgent() + injectSysConfig("dd.data.streams.enabled", "true") + } + + @Override + protected boolean isDataStreamsEnabled() { + return true + } +} + +class ArmeriaGrpcDataStreamsEnabledV0ForkedTest extends ArmeriaGrpcDataStreamsEnabledForkedTest { + + @Override + int version() { + return 0 + } + + @Override + protected String clientOperation() { + return "grpc.client" + } + + @Override + protected String serverOperation() { + return "grpc.server" + } +} + +class ArmeriaGrpcDataStreamsEnabledV1ForkedTest extends ArmeriaGrpcDataStreamsEnabledForkedTest { + + @Override + int version() { + return 1 + } + + @Override + protected String clientOperation() { + return "grpc.client.request" + } + + @Override + protected String serverOperation() { + return "grpc.server.request" + } +} + +class ArmeriaGrpcDataStreamsDisabledForkedTest extends ArmeriaGrpcTest { + @Override + protected void configurePreAgent() { + super.configurePreAgent() + injectSysConfig("dd.data.streams.enabled", "false") + } + + @Override + protected boolean isDataStreamsEnabled() { + return false + } + + @Override + int version() { + return 0 + } + + @Override + protected String clientOperation() { + return "grpc.client" + } + + @Override + protected String serverOperation() { + return "grpc.server" + } +} diff --git a/dd-java-agent/instrumentation/armeria-grpc/src/test/proto/helloworld.proto b/dd-java-agent/instrumentation/armeria-grpc/src/test/proto/helloworld.proto new file mode 100644 index 00000000000..26d9919e53e --- /dev/null +++ b/dd-java-agent/instrumentation/armeria-grpc/src/test/proto/helloworld.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; + +package example; + +service Greeter { + rpc SayHello (Request) returns (Response) { + } + + rpc Conversation (stream Response) returns (stream 
Response) { + } + + rpc Ignore (Request) returns (Response) { + } + + rpc IgnoreInbound (Request) returns (Response) { + } +} + +message Request { + string name = 1; +} + +message Response { + string message = 1; +} diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/build.gradle b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/build.gradle index 82bf01ca4a5..2e036a050e1 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/build.gradle +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/build.gradle @@ -23,7 +23,15 @@ apply from: "$rootDir/gradle/java.gradle" // We test older version in separate test set to test newer version and latest deps in the 'default' // test dir. Otherwise we get strange warnings in Idea. addTestSuite('test_before_1_11_106') +addTestSuiteExtendingForDir('test_before_1_11_106ForkedTest', 'test_before_1_11_106', 'test_before_1_11_106') + addTestSuiteForDir('latestDepTest', 'test') +addTestSuiteExtendingForDir('latestDepForkedTest', 'latestDepTest', 'test') + +addTestSuite('kinesisDsmTest') +addTestSuiteExtendingForDir('kinesisDsmForkedTest', 'kinesisDsmTest', 'kinesisDsmTest') +addTestSuiteForDir('latestKinesisDsmTest', 'kinesisDsmTest') +addTestSuiteExtendingForDir('latestKinesisDsmForkedTest', 'latestKinesisDsmTest', 'kinesisDsmTest') dependencies { compileOnly group: 'com.amazonaws', name: 'aws-java-sdk-core', version: '1.11.0' @@ -78,6 +86,9 @@ dependencies { } } + kinesisDsmTestImplementation group: 'com.amazonaws', name: 'aws-java-sdk-kinesis', version: '1.12.366' + latestKinesisDsmTestImplementation group: 'com.amazonaws', name: 'aws-java-sdk-kinesis', version: '+' + latestDepTestImplementation group: 'com.amazonaws', name: 'aws-java-sdk-s3', version: '+' latestDepTestImplementation group: 'com.amazonaws', name: 'aws-java-sdk-rds', version: '+' latestDepTestImplementation group: 'com.amazonaws', name: 'aws-java-sdk-ec2', version: '+' @@ -90,3 +101,7 @@ dependencies { tasks.named("test").configure { dependsOn "test_before_1_11_106" } + +tasks.named("forkedTest").configure { + dependsOn "test_before_1_11_106ForkedTest" +} diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/kinesisDsmTest/groovy/AWS1KinesisClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/kinesisDsmTest/groovy/AWS1KinesisClientTest.groovy new file mode 100644 index 00000000000..738b2ac1bb2 --- /dev/null +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/kinesisDsmTest/groovy/AWS1KinesisClientTest.groovy @@ -0,0 +1,225 @@ +import com.amazonaws.auth.AWSStaticCredentialsProvider +import com.amazonaws.auth.AnonymousAWSCredentials +import com.amazonaws.client.builder.AwsClientBuilder +import com.amazonaws.services.kinesis.AmazonKinesis +import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder +import com.amazonaws.services.kinesis.model.GetRecordsRequest +import com.amazonaws.services.kinesis.model.PutRecordRequest +import com.amazonaws.services.kinesis.model.PutRecordsRequest +import com.amazonaws.services.kinesis.model.PutRecordsRequestEntry +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.api.Config +import datadog.trace.api.DDSpanTypes +import datadog.trace.api.DDTags +import datadog.trace.bootstrap.instrumentation.api.Tags +import datadog.trace.core.datastreams.StatsGroup +import spock.lang.AutoCleanup +import spock.lang.Shared +import spock.util.concurrent.PollingConditions + +import java.nio.ByteBuffer +import java.nio.charset.Charset +import java.time.Instant +import 
java.util.concurrent.atomic.AtomicReference + +import static datadog.trace.agent.test.server.http.TestHttpServer.httpServer + +abstract class AWS1KinesisClientTest extends VersionedNamingTestBase { + + @Shared + def credentialsProvider = new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()) + @Shared + def responseBody = new AtomicReference() + @AutoCleanup + @Shared + def server = httpServer { + handlers { + all { + response.status(200).send(responseBody.get()) + } + } + } + @Shared + def endpoint = new AwsClientBuilder.EndpointConfiguration("http://localhost:$server.address.port", "us-west-2") + + @Shared + final String streamName = "somestream" + + @Shared + final String streamArn = "arnprefix:stream/" + streamName + + @Shared + def timestamp = Instant.now().minusSeconds(60) + + @Shared + def timestamp2 = timestamp.plusSeconds(1) + + @Override + protected void configurePreAgent() { + super.configurePreAgent() + // the actual service returns cbor encoded json + System.setProperty("com.amazonaws.sdk.disableCbor", "true") + } + + @Override + protected boolean isDataStreamsEnabled() { + return true + } + + @Override + String operation() { + null + } + + @Override + String service() { + null + } + + abstract String expectedOperation(String awsService, String awsOperation) + + abstract String expectedService(String awsService, String awsOperation) + + def "send #operation request with mocked response produces #dsmStatCount stat points"() { + setup: + def conditions = new PollingConditions(timeout: 1) + responseBody.set(body) + AmazonKinesis client = AmazonKinesisClientBuilder.standard() + .withEndpointConfiguration(endpoint) + .withCredentials(credentialsProvider) + .build() + + when: + def response = call.call(client) + + TEST_WRITER.waitForTraces(1) + TEST_DATA_STREAMS_WRITER.waitForGroups(1) + + then: + response != null + + conditions.eventually { + List results = TEST_DATA_STREAMS_WRITER.groups.findAll { it.parentHash == 0 } + assert results.size() >= 1 + def pathwayLatencyCount = 0 + def edgeLatencyCount = 0 + results.each { group -> + pathwayLatencyCount += group.pathwayLatency.count + edgeLatencyCount += group.edgeLatency.count + verifyAll(group) { + edgeTags.containsAll(["direction:" + dsmDirection, "topic:" + streamArn, "type:kinesis"]) + edgeTags.size() == 3 + } + } + verifyAll { + pathwayLatencyCount == dsmStatCount + edgeLatencyCount == dsmStatCount + } + } + + assertTraces(1) { + trace(1) { + span { + serviceName expectedService(service, operation) + operationName expectedOperation(service, operation) + resourceName "$service.$operation" + spanType DDSpanTypes.HTTP_CLIENT + errored false + measured true + parent() + tags { + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.HTTP_URL" "$server.address/" + "$Tags.HTTP_METHOD" "$method" + "$Tags.HTTP_STATUS" 200 + "$Tags.PEER_PORT" server.address.port + "$Tags.PEER_HOSTNAME" "localhost" + "aws.service" { it.contains(service) } + "aws_service" { it.contains(service.toLowerCase()) } + "aws.endpoint" "$server.address" + "aws.operation" "${operation}Request" + "aws.agent" "java-aws-sdk" + "aws.stream.name" streamName + "streamname" streamName + "$DDTags.PATHWAY_HASH" { String } + peerServiceFrom("aws.stream.name") + defaultTags() + } + } + } + } + + where: + service | operation | dsmDirection | dsmStatCount | method | path | call | body + "Kinesis" | "GetRecords" | "in" | 1 | "POST" | "/" | { AmazonKinesis c -> c.getRecords(new GetRecordsRequest().withStreamARN(streamArn)) } | """{ + 
"MillisBehindLatest": 2100, + "NextShardIterator": "AAA", + "Records": [ + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + } + ] +}""" + "Kinesis" | "GetRecords" | "in" | 2 | "POST" | "/" | { AmazonKinesis c -> c.getRecords(new GetRecordsRequest().withStreamARN(streamArn)) } | """{ + "MillisBehindLatest": 2100, + "NextShardIterator": "AAA", + "Records": [ + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + }, + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp2.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + } + ] +}""" + "Kinesis" | "PutRecord" | "out" | 1 | "POST" | "/" | { AmazonKinesis c -> c.putRecord(new PutRecordRequest().withStreamARN(streamArn).withData(ByteBuffer.wrap("message".getBytes(Charset.forName("UTF-8"))))) } | "" + "Kinesis" | "PutRecords" | "out" | 1 | "POST" | "/" | { AmazonKinesis c -> c.putRecords(new PutRecordsRequest().withStreamARN(streamArn).withRecords(new PutRecordsRequestEntry().withData(ByteBuffer.wrap("message".getBytes(Charset.forName("UTF-8")))))) } | "" + "Kinesis" | "PutRecords" | "out" | 2 | "POST" | "/" | { AmazonKinesis c -> c.putRecords(new PutRecordsRequest().withStreamARN(streamArn).withRecords(new PutRecordsRequestEntry().withData(ByteBuffer.wrap("message".getBytes(Charset.forName("UTF-8")))), new PutRecordsRequestEntry().withData(ByteBuffer.wrap("message".getBytes(Charset.forName("UTF-8")))))) } | "" + } +} + +class AWS1KinesisClientV0Test extends AWS1KinesisClientTest { + + @Override + String expectedOperation(String awsService, String awsOperation) { + "aws.http" + } + + @Override + String expectedService(String awsService, String awsOperation) { + return "java-aws-sdk" + } + + @Override + int version() { + 0 + } +} + +class AWS1KinesisClientV1ForkedTest extends AWS1KinesisClientTest { + + @Override + String expectedOperation(String awsService, String awsOperation) { + return "aws.${awsService.toLowerCase()}.request" + } + + @Override + String expectedService(String awsService, String awsOperation) { + Config.get().getServiceName() + } + + @Override + int version() { + 1 + } +} diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java index 178e182795a..b1a7850d1a7 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java @@ -10,14 +10,18 @@ import datadog.trace.api.DDTags; import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; +import datadog.trace.api.experimental.DataStreamsContextCarrier.NoOp; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; +import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import 
datadog.trace.bootstrap.instrumentation.api.InstrumentationTags; import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.HttpClientDecorator; import java.net.URI; +import java.util.List; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -46,6 +50,9 @@ public class AwsSdkClientDecorator extends HttpClientDecorator serviceNameCache = DDCaches.newFixedSizeCache(128); private static final Pattern AWS_SERVICE_NAME_PATTERN = Pattern.compile("Amazon\\s?(\\w+)"); + private static final String PUT_RECORD_OPERATION_NAME = "PutRecordRequest"; + private static final String PUT_RECORDS_OPERATION_NAME = "PutRecordsRequest"; + private static String simplifyServiceName(String awsServiceName) { return serviceNameCache.computeIfAbsent( awsServiceName, AwsSdkClientDecorator::applyServiceNamePattern); @@ -69,6 +76,7 @@ public AgentSpan onRequest(final AgentSpan span, final Request request) { final String awsServiceName = request.getServiceName(); final AmazonWebServiceRequest originalRequest = request.getOriginalRequest(); final Class awsOperation = originalRequest.getClass(); + final GetterAccess access = GetterAccess.of(originalRequest); span.setTag(InstrumentationTags.AWS_AGENT, COMPONENT_NAME); span.setTag(InstrumentationTags.AWS_SERVICE, awsServiceName); @@ -86,20 +94,26 @@ public AgentSpan onRequest(final AgentSpan span, final Request request) { case "SQS.ReceiveMessage": case "SQS.DeleteMessage": case "SQS.DeleteMessageBatch": - span.setServiceName(SQS_SERVICE_NAME); + if (SQS_SERVICE_NAME != null) { + span.setServiceName(SQS_SERVICE_NAME); + } break; case "SNS.Publish": - span.setServiceName(SNS_SERVICE_NAME); + if (SNS_SERVICE_NAME != null) { + span.setServiceName(SNS_SERVICE_NAME); + } break; default: - span.setServiceName(GENERIC_SERVICE_NAME); + if (GENERIC_SERVICE_NAME != null) { + span.setServiceName(GENERIC_SERVICE_NAME); + } break; } - RequestAccess access = RequestAccess.of(originalRequest); - String bucketName = access.getBucketName(originalRequest); String bestPrecursor = null; String bestPeerService = null; + + String bucketName = access.getBucketName(originalRequest); if (null != bucketName) { span.setTag(InstrumentationTags.AWS_BUCKET_NAME, bucketName); span.setTag(InstrumentationTags.BUCKET_NAME, bucketName); @@ -134,6 +148,17 @@ public AgentSpan onRequest(final AgentSpan span, final Request request) { bestPrecursor = InstrumentationTags.AWS_STREAM_NAME; bestPeerService = streamName; } + String streamArn = access.getStreamARN(originalRequest); + if (null != streamArn) { + int streamNameStart = streamArn.indexOf(":stream/"); + if (streamNameStart >= 0) { + streamName = streamArn.substring(streamNameStart + 8); + span.setTag(InstrumentationTags.AWS_STREAM_NAME, streamName); + span.setTag(InstrumentationTags.STREAM_NAME, streamName); + bestPrecursor = InstrumentationTags.AWS_STREAM_NAME; + bestPeerService = streamName; + } + } String tableName = access.getTableName(originalRequest); if (null != tableName) { span.setTag(InstrumentationTags.AWS_TABLE_NAME, tableName); @@ -149,6 +174,32 @@ public AgentSpan onRequest(final AgentSpan span, final Request request) { span.setTag(DDTags.PEER_SERVICE_SOURCE, bestPrecursor); } + if (span.traceConfig().isDataStreamsEnabled() + && null != streamArn + && "AmazonKinesis".equals(awsServiceName)) { + switch (awsOperation.getSimpleName()) { + case PUT_RECORD_OPERATION_NAME: + try (AgentScope 
scope = AgentTracer.activateSpan(span)) { + AgentTracer.get() + .getDataStreamsMonitoring() + .setProduceCheckpoint("kinesis", streamArn, NoOp.INSTANCE); + } + break; + case PUT_RECORDS_OPERATION_NAME: + try (AgentScope scope = AgentTracer.activateSpan(span)) { + List records = access.getRecords(originalRequest); + for (Object ignored : records) { + AgentTracer.get() + .getDataStreamsMonitoring() + .setProduceCheckpoint("kinesis", streamArn, NoOp.INSTANCE); + } + } + break; + default: + break; + } + } + return span; } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/GetterAccess.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/GetterAccess.java new file mode 100644 index 00000000000..972864839cf --- /dev/null +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/GetterAccess.java @@ -0,0 +1,158 @@ +package datadog.trace.instrumentation.aws.v0; + +import datadog.trace.api.GenericClassValue; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.util.Date; +import java.util.List; +import java.util.function.Function; +import java.util.regex.Pattern; + +final class GetterAccess { + private static final ClassValue GETTER_ACCESS = + GenericClassValue.of( + // Uses inner class for predictable name for Instrumenter.Default.helperClassNames() + new Function, GetterAccess>() { + @Override + public GetterAccess apply(final Class requestType) { + return new GetterAccess(requestType); + } + }); + + static GetterAccess of(final Object request) { + return GETTER_ACCESS.get(request.getClass()); + } + + private static final Pattern REQUEST_OPERATION_NAME_PATTERN = + Pattern.compile("Request", Pattern.LITERAL); + + private final String operationName; + private final MethodHandle getBucketName; + private final MethodHandle getQueueUrl; + private final MethodHandle getQueueName; + private final MethodHandle getTopicArn; + private final MethodHandle getStreamName; + private final MethodHandle getStreamARN; + private final MethodHandle getRecords; + private final MethodHandle getApproximateArrivalTimestamp; + private final MethodHandle getTableName; + + private GetterAccess(final Class objectType) { + operationName = + REQUEST_OPERATION_NAME_PATTERN.matcher(objectType.getSimpleName()).replaceAll(""); + getBucketName = findStringGetter(objectType, "getBucketName"); + getQueueUrl = findStringGetter(objectType, "getQueueUrl"); + getQueueName = findStringGetter(objectType, "getQueueName"); + getTopicArn = findStringGetter(objectType, "getTopicArn"); + getStreamName = findStringGetter(objectType, "getStreamName"); + getStreamARN = findStringGetter(objectType, "getStreamARN"); + getRecords = findListGetter(objectType, "getRecords"); + getApproximateArrivalTimestamp = + findGetter(objectType, "getApproximateArrivalTimestamp", Date.class); + getTableName = findStringGetter(objectType, "getTableName"); + } + + String getOperationNameFromType() { + return operationName; + } + + String getBucketName(final Object object) { + return invokeForString(getBucketName, object); + } + + String getQueueUrl(final Object object) { + return invokeForString(getQueueUrl, object); + } + + String getQueueName(final Object object) { + return invokeForString(getQueueName, object); + } + + String getTopicArn(final Object object) { + return invokeForString(getTopicArn, object); + } + + String 
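
Two details of the decorator change above are worth spelling out: the stream name used for tagging and peer.service inference is taken from the ARN segment after ":stream/", and the data-streams produce checkpoint is emitted once per logical record (once for PutRecord, once per entry for PutRecords). A sketch of the ARN rule, with a hypothetical helper class but the same substring logic as the code above:

final class StreamArnSketch {
  // "arn:aws:kinesis:us-west-2:123456789012:stream/somestream" -> "somestream";
  // returns null when the ARN has no ":stream/" segment.
  static String streamNameFromArn(String streamArn) {
    int start = streamArn.indexOf(":stream/");
    return start >= 0 ? streamArn.substring(start + ":stream/".length()) : null;
  }
}
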
getStreamName(final Object object) { + return invokeForString(getStreamName, object); + } + + String getStreamARN(final Object object) { + return invokeForString(getStreamARN, object); + } + + List getRecords(final Object object) { + return invokeForList(getRecords, object); + } + + String getTableName(final Object object) { + return invokeForString(getTableName, object); + } + + Date getApproximateArrivalTimestamp(final Object object) { + return invoke(getApproximateArrivalTimestamp, object); + } + + private static String invokeForString(final MethodHandle method, final Object object) { + if (null == method) { + return null; + } + try { + return (String) method.invoke(object); + } catch (Throwable e) { + return null; + } + } + + private static List invokeForList(final MethodHandle method, final Object object) { + if (null == method) { + return null; + } + try { + return (List) method.invoke(object); + } catch (Throwable e) { + return null; + } + } + + private static T invoke(final MethodHandle method, final Object object) { + if (null == method) { + return null; + } + try { + return (T) method.invoke(object); + } catch (Throwable e) { + return null; + } + } + + private static final MethodHandles.Lookup PUBLIC_LOOKUP = MethodHandles.publicLookup(); + private static final MethodType STRING_RETURN_TYPE = MethodType.methodType(String.class); + private static final MethodType LIST_RETURN_TYPE = MethodType.methodType(List.class); + + private static MethodHandle findStringGetter( + final Class requestType, final String methodName) { + try { + return PUBLIC_LOOKUP.findVirtual(requestType, methodName, STRING_RETURN_TYPE); + } catch (Throwable e) { + return null; + } + } + + private static MethodHandle findListGetter(final Class requestType, final String methodName) { + try { + return PUBLIC_LOOKUP.findVirtual(requestType, methodName, LIST_RETURN_TYPE); + } catch (Throwable e) { + return null; + } + } + + private static MethodHandle findGetter( + final Class pojoType, final String methodName, final Class returnType) { + try { + return PUBLIC_LOOKUP.findVirtual(pojoType, methodName, MethodType.methodType(returnType)); + } catch (Throwable e) { + return null; + } + } +} diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/HandlerChainFactoryInstrumentation.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/HandlerChainFactoryInstrumentation.java index 7bd51250a2c..0393dcd76c9 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/HandlerChainFactoryInstrumentation.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/HandlerChainFactoryInstrumentation.java @@ -33,8 +33,8 @@ public String instrumentedType() { public String[] helperClassNames() { return new String[] { packageName + ".AwsSdkClientDecorator", - packageName + ".RequestAccess", - packageName + ".RequestAccess$1", + packageName + ".GetterAccess", + packageName + ".GetterAccess$1", packageName + ".TracingRequestHandler", packageName + ".AwsNameCache", }; diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/RequestAccess.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/RequestAccess.java deleted file mode 100644 index da28418660d..00000000000 --- 
a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/RequestAccess.java +++ /dev/null @@ -1,85 +0,0 @@ -package datadog.trace.instrumentation.aws.v0; - -import datadog.trace.api.GenericClassValue; -import java.lang.invoke.MethodHandle; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; -import java.util.function.Function; - -final class RequestAccess { - private static final ClassValue REQUEST_ACCESS = - GenericClassValue.of( - // Uses inner class for predictable name for Instrumenter.Default.helperClassNames() - new Function, RequestAccess>() { - @Override - public RequestAccess apply(final Class requestType) { - return new RequestAccess(requestType); - } - }); - - static RequestAccess of(final Object request) { - return REQUEST_ACCESS.get(request.getClass()); - } - - private final MethodHandle getBucketName; - private final MethodHandle getQueueUrl; - private final MethodHandle getQueueName; - private final MethodHandle getTopicArn; - private final MethodHandle getStreamName; - private final MethodHandle getTableName; - - private RequestAccess(final Class requestType) { - getBucketName = findGetter(requestType, "getBucketName"); - getQueueUrl = findGetter(requestType, "getQueueUrl"); - getQueueName = findGetter(requestType, "getQueueName"); - getTopicArn = findGetter(requestType, "getTopicArn"); - getStreamName = findGetter(requestType, "getStreamName"); - getTableName = findGetter(requestType, "getTableName"); - } - - String getBucketName(final Object request) { - return invoke(getBucketName, request); - } - - String getQueueUrl(final Object request) { - return invoke(getQueueUrl, request); - } - - String getQueueName(final Object request) { - return invoke(getQueueName, request); - } - - String getTopicArn(final Object request) { - return invoke(getTopicArn, request); - } - - String getStreamName(final Object request) { - return invoke(getStreamName, request); - } - - String getTableName(final Object request) { - return invoke(getTableName, request); - } - - private static String invoke(final MethodHandle method, final Object request) { - if (null == method) { - return null; - } - try { - return (String) method.invoke(request); - } catch (Throwable e) { - return null; - } - } - - private static final MethodHandles.Lookup PUBLIC_LOOKUP = MethodHandles.publicLookup(); - private static final MethodType STRING_RETURN_TYPE = MethodType.methodType(String.class); - - private static MethodHandle findGetter(final Class requestType, final String methodName) { - try { - return PUBLIC_LOOKUP.findVirtual(requestType, methodName, STRING_RETURN_TYPE); - } catch (Throwable e) { - return null; - } - } -} diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java index fd66500b04b..557e8aa3f23 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java @@ -4,6 +4,10 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.noopSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.propagate; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; 
+import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; +import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; +import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v0.AwsSdkClientDecorator.AWS_LEGACY_TRACING; import static datadog.trace.instrumentation.aws.v0.AwsSdkClientDecorator.DECORATE; @@ -15,7 +19,13 @@ import datadog.trace.api.Config; import datadog.trace.api.TracePropagationStyle; import datadog.trace.bootstrap.ContextStore; +import datadog.trace.bootstrap.instrumentation.api.AgentDataStreamsMonitoring; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; +import datadog.trace.bootstrap.instrumentation.api.PathwayContext; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,17 +82,44 @@ public void afterResponse(final Request request, final Response response) DECORATE.beforeFinish(span); span.finish(); } + AmazonWebServiceRequest originalRequest = request.getOriginalRequest(); + GetterAccess requestAccess = GetterAccess.of(originalRequest); if (!AWS_LEGACY_TRACING && isPollingResponse(response.getAwsResponse())) { try { // store queueUrl inside response for SqsReceiveResultInstrumentation - AmazonWebServiceRequest originalRequest = request.getOriginalRequest(); responseQueueStore.put( - response.getAwsResponse(), - RequestAccess.of(originalRequest).getQueueUrl(originalRequest)); + response.getAwsResponse(), requestAccess.getQueueUrl(originalRequest)); } catch (Throwable e) { log.debug("Unable to extract queueUrl from ReceiveMessageRequest", e); } } + if (span != null + && span.traceConfig().isDataStreamsEnabled() + && "AmazonKinesis".equals(request.getServiceName()) + && "GetRecords".equals(requestAccess.getOperationNameFromType())) { + String streamArn = requestAccess.getStreamARN(originalRequest); + if (null != streamArn) { + List records = + GetterAccess.of(response.getAwsResponse()).getRecords(response.getAwsResponse()); + if (null != records) { + LinkedHashMap sortedTags = new LinkedHashMap<>(); + sortedTags.put(DIRECTION_TAG, DIRECTION_IN); + sortedTags.put(TOPIC_TAG, streamArn); + sortedTags.put(TYPE_TAG, "kinesis"); + for (Object record : records) { + Date arrivalTime = GetterAccess.of(record).getApproximateArrivalTimestamp(record); + AgentDataStreamsMonitoring dataStreamsMonitoring = + AgentTracer.get().getDataStreamsMonitoring(); + PathwayContext pathwayContext = dataStreamsMonitoring.newPathwayContext(); + pathwayContext.setCheckpoint( + sortedTags, dataStreamsMonitoring::add, arrivalTime.getTime()); + if (!span.context().getPathwayContext().isStarted()) { + span.context().mergePathwayContext(pathwayContext); + } + } + } + } + } } @Override @@ -90,7 +127,14 @@ public void afterError(final Request request, final Response response, fin final AgentSpan span = request.getHandlerContext(SPAN_CONTEXT_KEY); if (span != null) { request.addHandlerContext(SPAN_CONTEXT_KEY, null); - DECORATE.onError(span, e); + if (response != null) { + DECORATE.onResponse(span, response); + if (span.isError()) { + DECORATE.onError(span, e); + } + } else { + DECORATE.onError(span, e); + } DECORATE.beforeFinish(span); span.finish(); } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/build.gradle 
b/dd-java-agent/instrumentation/aws-java-sdk-2.2/build.gradle index 1335d28286d..c5fe90b2312 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/build.gradle +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/build.gradle @@ -1,4 +1,3 @@ - muzzle { pass { group = "software.amazon.awssdk" @@ -11,7 +10,14 @@ muzzle { apply from: "$rootDir/gradle/java.gradle" -addTestSuite('latestDepTest') +addTestSuiteForDir('latestDepTest', 'test') +// Broken: at some point S3 moved the bucket name to the hostname resulting in host not found somebucket.localhost on all S3 tests +// addTestSuiteExtendingForDir('latestDepForkedTest', 'latestDepTest', 'test') + +addTestSuite('kinesisDsmTest') +addTestSuiteExtendingForDir('kinesisDsmForkedTest', 'kinesisDsmTest', 'kinesisDsmTest') +addTestSuiteForDir('latestKinesisDsmTest', 'kinesisDsmTest') +addTestSuiteExtendingForDir('latestKinesisDsmForkedTest', 'latestKinesisDsmTest', 'kinesisDsmTest') def fixedSdkVersion = '2.20.33' // 2.20.34 is missing and breaks IDEA import @@ -31,6 +37,15 @@ dependencies { testImplementation group: 'software.amazon.awssdk', name: 'dynamodb', version: '2.2.0' testImplementation group: 'software.amazon.awssdk', name: 'kinesis', version: '2.2.0' + testImplementation group: 'org.eclipse.jetty', name: 'jetty-server', version: '9.3.0.v20150612' + testImplementation group: 'org.eclipse.jetty.http2', name: 'http2-server', version: '9.3.0.v20150612' + + // First version where dsm traced operations have required StreamARN parameter + kinesisDsmTestImplementation group: 'software.amazon.awssdk', name: 'apache-client', version: '2.18.40' + kinesisDsmTestImplementation group: 'software.amazon.awssdk', name: 'kinesis', version: '2.18.40' + latestKinesisDsmTestImplementation group: 'software.amazon.awssdk', name: 'apache-client', version: '+' + latestKinesisDsmTestImplementation group: 'software.amazon.awssdk', name: 'kinesis', version: '+' + latestDepTestImplementation project(':dd-java-agent:instrumentation:apache-httpclient-4') latestDepTestImplementation project(':dd-java-agent:instrumentation:netty-4.1') diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/kinesisDsmTest/groovy/Aws2KinesisDataStreamsTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/kinesisDsmTest/groovy/Aws2KinesisDataStreamsTest.groovy new file mode 100644 index 00000000000..2c5f808d857 --- /dev/null +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/kinesisDsmTest/groovy/Aws2KinesisDataStreamsTest.groovy @@ -0,0 +1,369 @@ +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.api.Config +import datadog.trace.api.DDSpanTypes +import datadog.trace.api.DDTags +import datadog.trace.bootstrap.instrumentation.api.Tags +import datadog.trace.core.datastreams.StatsGroup +import org.eclipse.jetty.http2.server.HTTP2CServerConnectionFactory +import org.eclipse.jetty.server.HttpConfiguration +import org.eclipse.jetty.server.HttpConnectionFactory +import org.eclipse.jetty.server.Server +import org.eclipse.jetty.server.ServerConnector +import org.eclipse.jetty.server.SslConnectionFactory +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider +import software.amazon.awssdk.core.ResponseInputStream +import software.amazon.awssdk.core.SdkBytes +import software.amazon.awssdk.core.interceptor.Context +import software.amazon.awssdk.core.interceptor.ExecutionAttributes +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor 
+import software.amazon.awssdk.regions.Region +import software.amazon.awssdk.services.kinesis.KinesisAsyncClient +import software.amazon.awssdk.services.kinesis.KinesisClient +import software.amazon.awssdk.services.kinesis.model.GetRecordsRequest +import software.amazon.awssdk.services.kinesis.model.PutRecordRequest +import software.amazon.awssdk.services.kinesis.model.PutRecordsRequest +import software.amazon.awssdk.services.kinesis.model.PutRecordsRequestEntry +import spock.lang.AutoCleanup +import spock.lang.Shared +import spock.lang.Unroll + +import java.time.Instant +import java.util.concurrent.Future +import java.util.concurrent.atomic.AtomicReference + +import static datadog.trace.agent.test.server.http.TestHttpServer.httpServer + +abstract class Aws2KinesisDataStreamsTest extends VersionedNamingTestBase { + + private static final StaticCredentialsProvider CREDENTIALS_PROVIDER = StaticCredentialsProvider + .create(AwsBasicCredentials.create("my-access-key", "my-secret-key")) + + @Shared + def responseBody = new AtomicReference() + @Shared + def servedRequestId = new AtomicReference() + + @Shared + def timestamp = Instant.now().minusSeconds(60) + + @Shared + def timestamp2 = timestamp.plusSeconds(1) + + @AutoCleanup + @Shared + def server = httpServer { + customizer { { + Server server -> { + ServerConnector httpConnector = server.getConnectors().find { + !it.connectionFactories.any { + it instanceof SslConnectionFactory + } + } + HttpConfiguration config = (httpConnector.connectionFactories.find { + it instanceof HttpConnectionFactory + } + as HttpConnectionFactory).getHttpConfiguration() + httpConnector.addConnectionFactory(new HTTP2CServerConnectionFactory(config)) + } + } + } + handlers { + all { + response + .status(200) + .addHeader("x-amzn-RequestId", servedRequestId.get()) + .sendWithType("application/x-amz-json-1.1", responseBody.get()) + } + } + } + + @Override + protected void configurePreAgent() { + super.configurePreAgent() + // the actual service returns cbor encoded json + System.setProperty("aws.cborEnabled", "false") + } + + @Override + String operation() { + null + } + + @Override + String service() { + null + } + + @Override + protected boolean isDataStreamsEnabled() { + true + } + + abstract String expectedOperation(String awsService, String awsOperation) + + abstract String expectedService(String awsService, String awsOperation) + + def watch(builder, callback) { + builder.addExecutionInterceptor(new ExecutionInterceptor() { + @Override + void afterExecution(Context.AfterExecution context, ExecutionAttributes executionAttributes) { + callback.call() + } + }) + } + + @Unroll + def "send #operation request with builder #builder.class.getSimpleName() mocked response"() { + setup: + boolean executed = false + def client = builder + // tests that our instrumentation doesn't disturb any overridden configuration + .overrideConfiguration({ watch(it, { executed = true }) }) + .endpointOverride(server.address) + .region(Region.AP_NORTHEAST_1) + .credentialsProvider(CREDENTIALS_PROVIDER) + .build() + responseBody.set(body) + servedRequestId.set(requestId) + when: + def response = call.call(client) + + if (response instanceof Future) { + response = response.get() + } + TEST_WRITER.waitForTraces(1) + TEST_DATA_STREAMS_WRITER.waitForGroups(1) + + then: + executed + response != null + response.class.simpleName.startsWith(operation) || response instanceof ResponseInputStream + + assertTraces(1) { + trace(1) { + span { + serviceName expectedService(service, operation) + 
operationName expectedOperation(service, operation) + resourceName "$service.$operation" + spanType DDSpanTypes.HTTP_CLIENT + errored false + measured true + parent() + tags { + def checkPeerService = false + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_PORT" server.address.port + "$Tags.HTTP_URL" "${server.address}${path}" + "$Tags.HTTP_METHOD" "$method" + "$Tags.HTTP_STATUS" 200 + "aws.service" "$service" + "aws_service" "$service" + "aws.operation" "${operation}" + "aws.agent" "java-aws-sdk" + "aws.requestId" "$requestId" + "aws.stream.name" "somestream" + "streamname" "somestream" + "$DDTags.PATHWAY_HASH" { String } + peerServiceFrom("aws.stream.name") + checkPeerService = true + defaultTags(false, checkPeerService) + } + } + } + } + + and: + StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } + verifyAll(first) { + edgeTags.containsAll(["direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis"]) + edgeTags.size() == 3 + pathwayLatency.count == dsmStatCount + edgeLatency.count == dsmStatCount + } + + cleanup: + servedRequestId.set(null) + + where: + service | operation | dsmDirection | dsmStatCount | method | path | requestId | builder | call | body + "Kinesis" | "GetRecords" | "in" | 1 | "POST" | "/" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | KinesisClient.builder() | { KinesisClient c -> c.getRecords(GetRecordsRequest.builder().streamARN("arnprefix:stream/somestream").build()) } | """{ + "MillisBehindLatest": 2100, + "NextShardIterator": "AAA", + "Records": [ + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + } + ] +}""" + "Kinesis" | "GetRecords" | "in" | 2 | "POST" | "/" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | KinesisClient.builder() | { KinesisClient c -> c.getRecords(GetRecordsRequest.builder().streamARN("arnprefix:stream/somestream").build()) } | """{ + "MillisBehindLatest": 2100, + "NextShardIterator": "AAA", + "Records": [ + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + }, + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp2.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + } + ] +}""" + "Kinesis" | "PutRecord" | "out" | 1 | "POST" | "/" | "UNKNOWN" | KinesisClient.builder() | { KinesisClient c -> c.putRecord(PutRecordRequest.builder().streamARN("arnprefix:stream/somestream").data(SdkBytes.fromUtf8String("message")).build()) } | "" + "Kinesis" | "PutRecords" | "out" | 1 | "POST" | "/" | "UNKNOWN" | KinesisClient.builder() | { KinesisClient c -> c.putRecords(PutRecordsRequest.builder().streamARN("arnprefix:stream/somestream").records(PutRecordsRequestEntry.builder().data(SdkBytes.fromUtf8String("message")).build()).build()) } | "" + "Kinesis" | "PutRecords" | "out" | 2 | "POST" | "/" | "UNKNOWN" | KinesisClient.builder() | { KinesisClient c -> c.putRecords(PutRecordsRequest.builder().streamARN("arnprefix:stream/somestream").records(PutRecordsRequestEntry.builder().data(SdkBytes.fromUtf8String("message")).build(), PutRecordsRequestEntry.builder().data(SdkBytes.fromUtf8String("message")).build()).build()) } | "" + } + + def "send #operation async request with builder 
#builder.class.getSimpleName() mocked response"() { + setup: + boolean executed = false + def client = builder + // tests that our instrumentation doesn't disturb any overridden configuration + .overrideConfiguration({ watch(it, { executed = true }) }) + .endpointOverride(server.address) + .region(Region.AP_NORTHEAST_1) + .credentialsProvider(CREDENTIALS_PROVIDER) + .build() + responseBody.set(body) + servedRequestId.set(requestId) + when: + def response = call.call(client) + + if (response instanceof Future) { + response = response.get() + } + TEST_WRITER.waitForTraces(1) + TEST_DATA_STREAMS_WRITER.waitForGroups(1) + + then: + executed + response != null + + assertTraces(1) { + trace(1) { + span { + serviceName expectedService(service, operation) + operationName expectedOperation(service, operation) + resourceName "$service.$operation" + spanType DDSpanTypes.HTTP_CLIENT + errored false + measured true + parent() + tags { + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_PORT" server.address.port + "$Tags.HTTP_URL" "${server.address}${path}" + "$Tags.HTTP_METHOD" "$method" + "$Tags.HTTP_STATUS" 200 + "aws.service" "$service" + "aws_service" "$service" + "aws.operation" "${operation}" + "aws.agent" "java-aws-sdk" + "aws.requestId" "$requestId" + "aws.stream.name" "somestream" + "streamname" "somestream" + "$DDTags.PATHWAY_HASH" { String } + peerServiceFrom("aws.stream.name") + defaultTags(false, true) + } + } + } + } + + and: + StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } + verifyAll(first) { + edgeTags.containsAll(["direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis"]) + edgeTags.size() == 3 + pathwayLatency.count == dsmStatCount + edgeLatency.count == dsmStatCount + } + + cleanup: + servedRequestId.set(null) + + where: + service | operation | dsmDirection | dsmStatCount | method | path | requestId | builder | call | body + "Kinesis" | "GetRecords" | "in" | 1 | "POST" | "/" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | KinesisAsyncClient.builder() | { KinesisAsyncClient c -> c.getRecords(GetRecordsRequest.builder().streamARN("arnprefix:stream/somestream").build()) } | """{ + "MillisBehindLatest": 2100, + "NextShardIterator": "AAA", + "Records": [ + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + } + ] +}""" + "Kinesis" | "GetRecords" | "in" | 2 | "POST" | "/" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | KinesisAsyncClient.builder() | { KinesisAsyncClient c -> c.getRecords(GetRecordsRequest.builder().streamARN("arnprefix:stream/somestream").build()) } | """{ + "MillisBehindLatest": 2100, + "NextShardIterator": "AAA", + "Records": [ + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + }, + { + "Data": "XzxkYXRhPl8w", + "PartitionKey": "partitionKey", + "ApproximateArrivalTimestamp": ${timestamp2.toEpochMilli()}, + "SequenceNumber": "21269319989652663814458848515492872193" + } + ] +}""" + "Kinesis" | "PutRecord" | "out" | 1 | "POST" | "/" | "UNKNOWN" | KinesisAsyncClient.builder() | { KinesisAsyncClient c -> c.putRecord(PutRecordRequest.builder().streamARN("arnprefix:stream/somestream").data(SdkBytes.fromUtf8String("message")).build()) } | "" + "Kinesis" | "PutRecords" | "out" | 1 | "POST" | 
"/" | "UNKNOWN" | KinesisAsyncClient.builder() | { KinesisAsyncClient c -> c.putRecords(PutRecordsRequest.builder().streamARN("arnprefix:stream/somestream").records(PutRecordsRequestEntry.builder().data(SdkBytes.fromUtf8String("message")).build()).build()) } | "" + "Kinesis" | "PutRecords" | "out" | 2 | "POST" | "/" | "UNKNOWN" | KinesisAsyncClient.builder() | { KinesisAsyncClient c -> c.putRecords(PutRecordsRequest.builder().streamARN("arnprefix:stream/somestream").records(PutRecordsRequestEntry.builder().data(SdkBytes.fromUtf8String("message")).build(), PutRecordsRequestEntry.builder().data(SdkBytes.fromUtf8String("message")).build()).build()) } | "" + } +} + +class Aws2KinesisDataStreamsV0Test extends Aws2KinesisDataStreamsTest { + + @Override + String expectedOperation(String awsService, String awsOperation) { + "aws.http" + } + + @Override + String expectedService(String awsService, String awsOperation) { + return "java-aws-sdk" + } + + @Override + int version() { + 0 + } +} + +class Aws2KinesisDataStreamsV1ForkedTest extends Aws2KinesisDataStreamsTest { + + @Override + String expectedOperation(String awsService, String awsOperation) { + return "aws.${awsService.toLowerCase()}.request" + } + + @Override + String expectedService(String awsService, String awsOperation) { + Config.get().getServiceName() + } + + @Override + int version() { + 1 + } +} diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java index f9a4b51581f..bc50763d0ff 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java @@ -1,22 +1,42 @@ package datadog.trace.instrumentation.aws.v2; +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; +import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; +import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; +import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; + import datadog.trace.api.Config; import datadog.trace.api.DDTags; import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; +import datadog.trace.api.experimental.DataStreamsContextCarrier.NoOp; import datadog.trace.api.naming.SpanNaming; +import datadog.trace.bootstrap.InstanceStore; +import datadog.trace.bootstrap.instrumentation.api.AgentDataStreamsMonitoring; import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags; +import datadog.trace.bootstrap.instrumentation.api.PathwayContext; import datadog.trace.bootstrap.instrumentation.api.ResourceNamePriorities; import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.HttpClientDecorator; import java.net.URI; +import java.time.Instant; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Optional; +import java.util.Set; import javax.annotation.Nonnull; import 
software.amazon.awssdk.awscore.AwsResponse; +import software.amazon.awssdk.core.SdkField; +import software.amazon.awssdk.core.SdkPojo; import software.amazon.awssdk.core.SdkRequest; import software.amazon.awssdk.core.SdkResponse; +import software.amazon.awssdk.core.interceptor.ExecutionAttribute; import software.amazon.awssdk.core.interceptor.ExecutionAttributes; import software.amazon.awssdk.core.interceptor.SdkExecutionAttribute; import software.amazon.awssdk.http.SdkHttpRequest; @@ -46,6 +66,22 @@ public class AwsSdkClientDecorator extends HttpClientDecorator KINESIS_PUT_RECORD_OPERATION_NAMES; + + static { + KINESIS_PUT_RECORD_OPERATION_NAMES = new HashSet<>(); + KINESIS_PUT_RECORD_OPERATION_NAMES.add("PutRecord"); + KINESIS_PUT_RECORD_OPERATION_NAMES.add("PutRecords"); + } + + public static final ExecutionAttribute KINESIS_STREAM_ARN_ATTRIBUTE = + InstanceStore.of(ExecutionAttribute.class) + .putIfAbsent("KinesisStreamArn", () -> new ExecutionAttribute<>("KinesisStreamArn")); + + // not static because this object would be ClassLoader specific if multiple SDK instances were + // loaded by different loaders + private SdkField kinesisApproximateArrivalTimestampField = null; + public CharSequence spanName(final ExecutionAttributes attributes) { final String awsServiceName = attributes.getAttribute(SdkExecutionAttribute.SERVICE_NAME); final String awsOperationName = attributes.getAttribute(SdkExecutionAttribute.OPERATION_NAME); @@ -62,7 +98,12 @@ public CharSequence spanName(final ExecutionAttributes attributes) { "aws", attributes.getAttribute(SdkExecutionAttribute.SERVICE_NAME), s)); } - public AgentSpan onSdkRequest(final AgentSpan span, final SdkRequest request) { + public AgentSpan onSdkRequest( + final AgentSpan span, final SdkRequest request, final ExecutionAttributes attributes) { + final String awsServiceName = attributes.getAttribute(SdkExecutionAttribute.SERVICE_NAME); + final String awsOperationName = attributes.getAttribute(SdkExecutionAttribute.OPERATION_NAME); + onOperation(span, awsServiceName, awsOperationName); + // S3 request.getValueForField("Bucket", String.class).ifPresent(name -> setBucketName(span, name)); request @@ -89,10 +130,74 @@ public AgentSpan onSdkRequest(final AgentSpan span, final SdkRequest request) { request .getValueForField("StreamName", String.class) .ifPresent(name -> setStreamName(span, name)); + Optional kinesisStreamArn = request.getValueForField("StreamARN", String.class); + kinesisStreamArn.ifPresent( + streamArn -> { + if (span.traceConfig().isDataStreamsEnabled()) { + attributes.putAttribute(KINESIS_STREAM_ARN_ATTRIBUTE, streamArn); + } + int streamNameStart = streamArn.indexOf(":stream/"); + if (streamNameStart >= 0) { + setStreamName(span, streamArn.substring(streamNameStart + 8)); + } + }); // DynamoDB request.getValueForField("TableName", String.class).ifPresent(name -> setTableName(span, name)); + // DSM + if (span.traceConfig().isDataStreamsEnabled() + && kinesisStreamArn.isPresent() + && "kinesis".equalsIgnoreCase(awsServiceName) + && KINESIS_PUT_RECORD_OPERATION_NAMES.contains(awsOperationName)) { + // https://github.com/DataDog/dd-trace-py/blob/864abb6c99e1cb0449904260bac93e8232261f2a/ddtrace/contrib/botocore/patch.py#L368 + List records = + request + .getValueForField("Records", List.class) + .orElse(Collections.singletonList(request)); // For PutRecord use request + + for (Object ignored : records) { + AgentTracer.get() + .getDataStreamsMonitoring() + .setProduceCheckpoint("kinesis", kinesisStreamArn.get(), NoOp.INSTANCE); + } + } 
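+    // Descriptive note (comment only, not present in the surrounding patch): the block above emits one
+    // data-streams produce checkpoint per outgoing record, so PutRecords batch sizes are reflected in the
+    // DSM stats, while PutRecord (no "Records" field) falls back to the request itself and contributes one.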
+ + return span; + } + + private static AgentSpan onOperation( + final AgentSpan span, final String awsServiceName, final String awsOperationName) { + String awsRequestName = awsServiceName + "." + awsOperationName; + span.setResourceName(awsRequestName, RESOURCE_NAME_PRIORITY); + + switch (awsRequestName) { + case "Sqs.SendMessage": + case "Sqs.SendMessageBatch": + case "Sqs.ReceiveMessage": + case "Sqs.DeleteMessage": + case "Sqs.DeleteMessageBatch": + if (SQS_SERVICE_NAME != null) { + span.setServiceName(SQS_SERVICE_NAME); + } + break; + case "Sns.PublishBatch": + case "Sns.Publish": + if (SNS_SERVICE_NAME != null) { + span.setServiceName(SNS_SERVICE_NAME); + } + break; + default: + if (GENERIC_SERVICE_NAME != null) { + span.setServiceName(GENERIC_SERVICE_NAME); + } + break; + } + span.setTag(InstrumentationTags.AWS_AGENT, COMPONENT_NAME); + span.setTag(InstrumentationTags.AWS_SERVICE, awsServiceName); + span.setTag(InstrumentationTags.TOP_LEVEL_AWS_SERVICE, awsServiceName); + span.setTag(InstrumentationTags.AWS_OPERATION, awsOperationName); + return span; } @@ -134,43 +239,62 @@ private static void setTableName(AgentSpan span, String name) { setPeerService(span, InstrumentationTags.AWS_TABLE_NAME, name); } - public AgentSpan onAttributes(final AgentSpan span, final ExecutionAttributes attributes) { - final String awsServiceName = attributes.getAttribute(SdkExecutionAttribute.SERVICE_NAME); - final String awsOperationName = attributes.getAttribute(SdkExecutionAttribute.OPERATION_NAME); - - String awsRequestName = awsServiceName + "." + awsOperationName; - span.setResourceName(awsRequestName, RESOURCE_NAME_PRIORITY); - - switch (awsRequestName) { - case "Sqs.SendMessage": - case "Sqs.SendMessageBatch": - case "Sqs.ReceiveMessage": - case "Sqs.DeleteMessage": - case "Sqs.DeleteMessageBatch": - span.setServiceName(SQS_SERVICE_NAME); - break; - case "Sns.PublishBatch": - case "Sns.Publish": - span.setServiceName(SNS_SERVICE_NAME); - break; - default: - span.setServiceName(GENERIC_SERVICE_NAME); - break; - } - span.setTag(InstrumentationTags.AWS_AGENT, COMPONENT_NAME); - span.setTag(InstrumentationTags.AWS_SERVICE, awsServiceName); - span.setTag(InstrumentationTags.TOP_LEVEL_AWS_SERVICE, awsServiceName); - span.setTag(InstrumentationTags.AWS_OPERATION, awsOperationName); - - return span; - } - - // Not overriding the super. Should call both with each type of response. 
- public AgentSpan onResponse(final AgentSpan span, final SdkResponse response) { + public AgentSpan onSdkResponse( + final AgentSpan span, final SdkResponse response, final ExecutionAttributes attributes) { if (response instanceof AwsResponse) { span.setTag( InstrumentationTags.AWS_REQUEST_ID, ((AwsResponse) response).responseMetadata().requestId()); + + final String awsServiceName = attributes.getAttribute(SdkExecutionAttribute.SERVICE_NAME); + final String awsOperationName = attributes.getAttribute(SdkExecutionAttribute.OPERATION_NAME); + if (span.traceConfig().isDataStreamsEnabled() + && "kinesis".equalsIgnoreCase(awsServiceName) + && "GetRecords".equals(awsOperationName)) { + // https://github.com/DataDog/dd-trace-py/blob/864abb6c99e1cb0449904260bac93e8232261f2a/ddtrace/contrib/botocore/patch.py#L350 + String streamArn = attributes.getAttribute(KINESIS_STREAM_ARN_ATTRIBUTE); + if (null != streamArn) { + response + .getValueForField("Records", List.class) + .ifPresent( + recordsRaw -> { + //noinspection unchecked + List records = (List) recordsRaw; + if (!records.isEmpty()) { + LinkedHashMap sortedTags = new LinkedHashMap<>(); + sortedTags.put(DIRECTION_TAG, DIRECTION_IN); + sortedTags.put(TOPIC_TAG, streamArn); + sortedTags.put(TYPE_TAG, "kinesis"); + if (null == kinesisApproximateArrivalTimestampField) { + Optional> maybeField = + records.get(0).sdkFields().stream() + .filter(f -> f.locationName().equals("ApproximateArrivalTimestamp")) + .findFirst(); + if (maybeField.isPresent()) { + //noinspection unchecked + kinesisApproximateArrivalTimestampField = + (SdkField) maybeField.get(); + } else { + // shouldn't be possible + return; + } + } + for (SdkPojo record : records) { + Instant arrivalTime = + kinesisApproximateArrivalTimestampField.getValueOrDefault(record); + AgentDataStreamsMonitoring dataStreamsMonitoring = + AgentTracer.get().getDataStreamsMonitoring(); + PathwayContext pathwayContext = dataStreamsMonitoring.newPathwayContext(); + pathwayContext.setCheckpoint( + sortedTags, dataStreamsMonitoring::add, arrivalTime.toEpochMilli()); + if (!span.context().getPathwayContext().isStarted()) { + span.context().mergePathwayContext(pathwayContext); + } + } + } + }); + } + } } return span; } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/TracingExecutionInterceptor.java b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/TracingExecutionInterceptor.java index 2dbc7cca6c2..6713a594c51 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/TracingExecutionInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/TracingExecutionInterceptor.java @@ -11,7 +11,9 @@ import datadog.trace.api.TracePropagationStyle; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.InstanceStore; +import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.core.SdkRequest; @@ -54,9 +56,10 @@ public void afterMarshalling( final Context.AfterMarshalling context, final ExecutionAttributes executionAttributes) { final AgentSpan span = executionAttributes.getAttribute(SPAN_ATTRIBUTE); if (span != null) { - DECORATE.onRequest(span, context.httpRequest()); - DECORATE.onSdkRequest(span, 
context.request()); - DECORATE.onAttributes(span, executionAttributes); + try (AgentScope ignored = activateSpan(span)) { + DECORATE.onRequest(span, context.httpRequest()); + DECORATE.onSdkRequest(span, context.request(), executionAttributes); + } } } @@ -103,7 +106,7 @@ public void afterExecution( if (span != null) { executionAttributes.putAttribute(SPAN_ATTRIBUTE, null); // Call onResponse on both types of responses: - DECORATE.onResponse(span, context.response()); + DECORATE.onSdkResponse(span, context.response(), executionAttributes); DECORATE.onResponse(span, context.httpResponse()); DECORATE.beforeFinish(span); span.finish(); @@ -123,7 +126,17 @@ public void onExecutionFailure( final AgentSpan span = executionAttributes.getAttribute(SPAN_ATTRIBUTE); if (span != null) { executionAttributes.putAttribute(SPAN_ATTRIBUTE, null); - DECORATE.onError(span, context.exception()); + Optional responseOpt = context.response(); + if (responseOpt.isPresent()) { + SdkResponse response = responseOpt.get(); + DECORATE.onSdkResponse(span, response, executionAttributes); + DECORATE.onResponse(span, response.sdkHttpResponse()); + if (span.isError()) { + DECORATE.onError(span, context.exception()); + } + } else { + DECORATE.onError(span, context.exception()); + } DECORATE.beforeFinish(span); span.finish(); } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/test/groovy/Aws2ClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/test/groovy/Aws2ClientTest.groovy index 997827b3f21..b9a6ec826fd 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/test/groovy/Aws2ClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/test/groovy/Aws2ClientTest.groovy @@ -408,7 +408,7 @@ abstract class Aws2ClientTest extends VersionedNamingTestBase { } } -class Aws2ClientV0Test extends Aws2ClientTest { +class Aws2ClientV0ForkedTest extends Aws2ClientTest { @Override String expectedOperation(String awsService, String awsOperation) { diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/MessageExtractAdapter.java b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/MessageExtractAdapter.java index 669f7cd1780..bea9bdd3c54 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/MessageExtractAdapter.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/MessageExtractAdapter.java @@ -1,7 +1,9 @@ package datadog.trace.instrumentation.aws.v1.sqs; import com.amazonaws.services.sqs.model.Message; +import com.amazonaws.services.sqs.model.MessageAttributeValue; import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; +import datadog.trace.bootstrap.instrumentation.messaging.DatadogAttributeParser; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,22 +15,27 @@ public final class MessageExtractAdapter implements AgentPropagation.ContextVisi @Override public void forEachKey(Message carrier, AgentPropagation.KeyClassifier classifier) { - for (Map.Entry entry : carrier.getAttributes().entrySet()) { - String key = entry.getKey(); - if ("AWSTraceHeader".equalsIgnoreCase(key)) { - key = "X-Amzn-Trace-Id"; - } - if (!classifier.accept(key, entry.getValue())) { - return; + Map systemAttributes = carrier.getAttributes(); + if (systemAttributes.containsKey("AWSTraceHeader")) { + // alias 'AWSTraceHeader' to 
'X-Amzn-Trace-Id' because it uses the same format + classifier.accept("X-Amzn-Trace-Id", systemAttributes.get("AWSTraceHeader")); + } + Map messageAttributes = carrier.getMessageAttributes(); + if (messageAttributes.containsKey("_datadog")) { + MessageAttributeValue datadog = messageAttributes.get("_datadog"); + if ("String".equals(datadog.getDataType())) { + DatadogAttributeParser.forEachProperty(classifier, datadog.getStringValue()); + } else if ("Binary".equals(datadog.getDataType())) { + DatadogAttributeParser.forEachProperty(classifier, datadog.getBinaryValue()); } } } public long extractTimeInQueueStart(final Message carrier) { try { - Map attributes = carrier.getAttributes(); - if (attributes.containsKey("SentTimestamp")) { - return Long.parseLong(attributes.get("SentTimestamp")); + Map systemAttributes = carrier.getAttributes(); + if (systemAttributes.containsKey("SentTimestamp")) { + return Long.parseLong(systemAttributes.get("SentTimestamp")); } } catch (Exception e) { log.debug("Unable to get SQS sent time", e); diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsDecorator.java b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsDecorator.java index 1a9f3433636..2c05ccd0cdf 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsDecorator.java @@ -27,11 +27,14 @@ public class SqsDecorator extends MessagingClientDecorator { private final CharSequence spanType; private final String serviceName; - private static final String LOCAL_SERVICE_NAME = - SQS_LEGACY_TRACING ? 
"sqs" : Config.get().getServiceName(); - public static final SqsDecorator CONSUMER_DECORATE = - new SqsDecorator(SPAN_KIND_CONSUMER, MESSAGE_CONSUMER, LOCAL_SERVICE_NAME); + new SqsDecorator( + SPAN_KIND_CONSUMER, + MESSAGE_CONSUMER, + SpanNaming.instance() + .namingSchema() + .messaging() + .inboundService("sqs", SQS_LEGACY_TRACING)); public static final SqsDecorator BROKER_DECORATE = new SqsDecorator( diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy index 583915eb396..4eace5c0114 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy @@ -1,4 +1,5 @@ import static datadog.trace.agent.test.utils.TraceUtils.basicSpan +import static java.nio.charset.StandardCharsets.UTF_8 import com.amazon.sqs.javamessaging.ProviderConfiguration import com.amazon.sqs.javamessaging.SQSConnectionFactory @@ -7,6 +8,8 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider import com.amazonaws.auth.AnonymousAWSCredentials import com.amazonaws.client.builder.AwsClientBuilder import com.amazonaws.services.sqs.AmazonSQSClientBuilder +import com.amazonaws.services.sqs.model.Message +import com.amazonaws.services.sqs.model.MessageAttributeValue import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.agent.test.utils.TraceUtils import datadog.trace.api.Config @@ -16,6 +19,7 @@ import datadog.trace.api.config.GeneralConfig import datadog.trace.api.naming.SpanNaming import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags +import datadog.trace.instrumentation.aws.v1.sqs.TracingList import org.elasticmq.rest.sqs.SQSRestServerBuilder import spock.lang.Shared @@ -147,6 +151,86 @@ abstract class SqsClientTest extends VersionedNamingTestBase { client.shutdown() } + def "trace details propagated via embedded SQS message attribute (string)"() { + setup: + TEST_WRITER.clear() + + when: + def message = new Message() + message.addMessageAttributesEntry('_datadog', new MessageAttributeValue().withDataType('String').withStringValue( + "{\"x-datadog-trace-id\": \"4948377316357291421\", \"x-datadog-parent-id\": \"6746998015037429512\", \"x-datadog-sampling-priority\": \"1\"}" + )) + def messages = new TracingList([message], "http://localhost:${address.port}/000000000000/somequeue") + + messages.forEach {/* consume to create message spans */ } + + then: + assertTraces(1) { + trace(1) { + span { + serviceName expectedService("SQS", "ReceiveMessage") + operationName expectedOperation("SQS", "ReceiveMessage") + resourceName "SQS.ReceiveMessage" + spanType DDSpanTypes.MESSAGE_CONSUMER + errored false + measured true + traceId(4948377316357291421 as BigInteger) + parentSpanId(6746998015037429512 as BigInteger) + tags { + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER + "aws.service" "AmazonSQS" + "aws_service" "sqs" + "aws.operation" "ReceiveMessageRequest" + "aws.agent" "java-aws-sdk" + "aws.queue.url" "http://localhost:${address.port}/000000000000/somequeue" + defaultTags(true) + } + } + } + } + } + + def "trace details propagated via embedded SQS message attribute (binary)"() { + setup: + TEST_WRITER.clear() + + when: + def message = new Message() + message.addMessageAttributesEntry('_datadog', new 
MessageAttributeValue().withDataType('Binary').withBinaryValue( + UTF_8.encode('eyJ4LWRhdGFkb2ctdHJhY2UtaWQiOiI0OTQ4Mzc3MzE2MzU3MjkxNDIxIiwieC1kYXRhZG9nLXBhcmVudC1pZCI6IjY3NDY5OTgwMTUwMzc0Mjk1MTIiLCJ4LWRhdGFkb2ctc2FtcGxpbmctcHJpb3JpdHkiOiIxIn0=') + )) + def messages = new TracingList([message], "http://localhost:${address.port}/000000000000/somequeue") + + messages.forEach {/* consume to create message spans */ } + + then: + assertTraces(1) { + trace(1) { + span { + serviceName expectedService("SQS", "ReceiveMessage") + operationName expectedOperation("SQS", "ReceiveMessage") + resourceName "SQS.ReceiveMessage" + spanType DDSpanTypes.MESSAGE_CONSUMER + errored false + measured true + traceId(4948377316357291421 as BigInteger) + parentSpanId(6746998015037429512 as BigInteger) + tags { + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER + "aws.service" "AmazonSQS" + "aws_service" "sqs" + "aws.operation" "ReceiveMessageRequest" + "aws.agent" "java-aws-sdk" + "aws.queue.url" "http://localhost:${address.port}/000000000000/somequeue" + defaultTags(true) + } + } + } + } + } + def "trace details propagated from SQS to JMS"() { setup: def client = AmazonSQSClientBuilder.standard() @@ -272,7 +356,7 @@ abstract class SqsClientTest extends VersionedNamingTestBase { } trace(1) { span { - serviceName SpanNaming.instance().namingSchema().messaging().inboundService(Config.get().getServiceName(), "jms") + serviceName SpanNaming.instance().namingSchema().messaging().inboundService("jms", Config.get().isLegacyTracingEnabled(true, "jms")) ?: Config.get().getServiceName() operationName SpanNaming.instance().namingSchema().messaging().inboundOperation("jms") resourceName "Consumed from Queue somequeue" spanType DDSpanTypes.MESSAGE_CONSUMER diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/MessageExtractAdapter.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/MessageExtractAdapter.java index d07d68074d8..18ed90ff4a2 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/MessageExtractAdapter.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/MessageExtractAdapter.java @@ -1,11 +1,8 @@ package datadog.trace.instrumentation.aws.v2.sqs; -import static datadog.trace.bootstrap.instrumentation.api.PathwayContext.DATADOG_KEY; - -import datadog.trace.api.Config; import datadog.trace.bootstrap.instrumentation.api.AgentPropagation; +import datadog.trace.bootstrap.instrumentation.messaging.DatadogAttributeParser; import java.util.Map; -import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.services.sqs.model.Message; @@ -18,38 +15,27 @@ public final class MessageExtractAdapter implements AgentPropagation.ContextVisi @Override public void forEachKey(Message carrier, AgentPropagation.KeyClassifier classifier) { - - for (Map.Entry entry : carrier.attributesAsStrings().entrySet()) { - String key = entry.getKey(); - if ("AWSTraceHeader".equalsIgnoreCase(key)) { - key = "X-Amzn-Trace-Id"; - } - if (!classifier.accept(key, entry.getValue())) { - return; - } + Map systemAttributes = carrier.attributesAsStrings(); + if (systemAttributes.containsKey("AWSTraceHeader")) { + // alias 'AWSTraceHeader' to 'X-Amzn-Trace-Id' because it uses the same format + classifier.accept("X-Amzn-Trace-Id", 
systemAttributes.get("AWSTraceHeader")); } - - if (Config.get().isDataStreamsEnabled()) { - for (Map.Entry entry : - carrier.messageAttributes().entrySet()) { - String key = entry.getKey(); - if (key.equalsIgnoreCase(DATADOG_KEY)) { - Optional value = entry.getValue().getValueForField("StringValue", String.class); - if (value.isPresent()) { - if (!classifier.accept(key, value.get())) { - return; - } - } - } + Map messageAttributes = carrier.messageAttributes(); + if (messageAttributes.containsKey("_datadog")) { + MessageAttributeValue datadog = messageAttributes.get("_datadog"); + if ("String".equals(datadog.dataType())) { + DatadogAttributeParser.forEachProperty(classifier, datadog.stringValue()); + } else if ("Binary".equals(datadog.dataType()) && null != datadog.binaryValue()) { + DatadogAttributeParser.forEachProperty(classifier, datadog.binaryValue().asByteBuffer()); } } } public long extractTimeInQueueStart(final Message carrier) { try { - Map attributes = carrier.attributesAsStrings(); - if (attributes.containsKey("SentTimestamp")) { - return Long.parseLong(attributes.get("SentTimestamp")); + Map systemAttributes = carrier.attributesAsStrings(); + if (systemAttributes.containsKey("SentTimestamp")) { + return Long.parseLong(systemAttributes.get("SentTimestamp")); } } catch (Exception e) { log.debug("Unable to get SQS sent time", e); diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsDecorator.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsDecorator.java index 65e4bdf08a1..914fb33bfe4 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsDecorator.java @@ -28,11 +28,14 @@ public class SqsDecorator extends MessagingClientDecorator { private final CharSequence spanType; private final String serviceName; - private static final String LOCAL_SERVICE_NAME = - SQS_LEGACY_TRACING ? 
"sqs" : Config.get().getServiceName(); - public static final SqsDecorator CONSUMER_DECORATE = - new SqsDecorator(SPAN_KIND_CONSUMER, MESSAGE_CONSUMER, LOCAL_SERVICE_NAME); + new SqsDecorator( + SPAN_KIND_CONSUMER, + MESSAGE_CONSUMER, + SpanNaming.instance() + .namingSchema() + .messaging() + .inboundService("sqs", SQS_LEGACY_TRACING)); public static final SqsDecorator BROKER_DECORATE = new SqsDecorator( diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java index a81ba720195..ee58cbf3538 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java @@ -91,7 +91,7 @@ protected void startNewMessageSpan(Message message) { sortedTags.put(DIRECTION_TAG, DIRECTION_IN); sortedTags.put(TOPIC_TAG, urlFileName(queueUrl)); sortedTags.put(TYPE_TAG, "sqs"); - AgentTracer.get().setDataStreamCheckpoint(span, sortedTags, 0); + AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, sortedTags, 0, 0); CONSUMER_DECORATE.afterStart(span); CONSUMER_DECORATE.onConsume(span, queueUrl, requestId); diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy index c471dfa31f1..05b5b5a0971 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy @@ -1,28 +1,32 @@ -import datadog.trace.api.DDTags -import datadog.trace.core.datastreams.StatsGroup -import spock.lang.IgnoreIf - import static datadog.trace.agent.test.utils.TraceUtils.basicSpan +import static java.nio.charset.StandardCharsets.UTF_8 import com.amazon.sqs.javamessaging.ProviderConfiguration import com.amazon.sqs.javamessaging.SQSConnectionFactory import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.agent.test.utils.TraceUtils import datadog.trace.api.Config +import datadog.trace.api.DDTags import datadog.trace.api.DDSpanId import datadog.trace.api.DDSpanTypes import datadog.trace.api.config.GeneralConfig import datadog.trace.api.naming.SpanNaming import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags +import datadog.trace.core.datastreams.StatsGroup +import datadog.trace.instrumentation.aws.v2.sqs.TracingList import org.elasticmq.rest.sqs.SQSRestServerBuilder import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider +import software.amazon.awssdk.core.SdkBytes import software.amazon.awssdk.core.SdkSystemSetting import software.amazon.awssdk.regions.Region import software.amazon.awssdk.services.sqs.SqsClient import software.amazon.awssdk.services.sqs.model.CreateQueueRequest import software.amazon.awssdk.services.sqs.model.ReceiveMessageRequest import software.amazon.awssdk.services.sqs.model.SendMessageRequest +import software.amazon.awssdk.services.sqs.model.Message +import software.amazon.awssdk.services.sqs.model.MessageAttributeValue +import spock.lang.IgnoreIf import spock.lang.Shared import javax.jms.Session @@ -182,6 +186,92 @@ abstract class SqsClientTest extends 
VersionedNamingTestBase { client.close() } + @IgnoreIf({instance.isDataStreamsEnabled()}) + def "trace details propagated via embedded SQS message attribute (string)"() { + setup: + TEST_WRITER.clear() + + when: + def message = Message.builder().messageAttributes(['_datadog': MessageAttributeValue.builder().dataType('String').stringValue( + "{\"x-datadog-trace-id\": \"4948377316357291421\", \"x-datadog-parent-id\": \"6746998015037429512\", \"x-datadog-sampling-priority\": \"1\"}" + ).build()]).build() + def messages = new TracingList([message], + "http://localhost:${address.port}/000000000000/somequeue", + "00000000-0000-0000-0000-000000000000") + + messages.forEach {/* consume to create message spans */ } + + then: + assertTraces(1) { + trace(1) { + span { + serviceName expectedService("Sqs", "ReceiveMessage") + operationName expectedOperation("Sqs", "ReceiveMessage") + resourceName "Sqs.ReceiveMessage" + spanType DDSpanTypes.MESSAGE_CONSUMER + errored false + measured true + traceId(4948377316357291421 as BigInteger) + parentSpanId(6746998015037429512 as BigInteger) + tags { + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER + "aws.service" "Sqs" + "aws_service" "Sqs" + "aws.operation" "ReceiveMessage" + "aws.agent" "java-aws-sdk" + "aws.queue.url" "http://localhost:${address.port}/000000000000/somequeue" + "aws.requestId" "00000000-0000-0000-0000-000000000000" + defaultTags(true) + } + } + } + } + } + + @IgnoreIf({instance.isDataStreamsEnabled()}) + def "trace details propagated via embedded SQS message attribute (binary)"() { + setup: + TEST_WRITER.clear() + + when: + def message = Message.builder().messageAttributes(['_datadog': MessageAttributeValue.builder().dataType('Binary').binaryValue(SdkBytes.fromByteBuffer( + UTF_8.encode('eyJ4LWRhdGFkb2ctdHJhY2UtaWQiOiI0OTQ4Mzc3MzE2MzU3MjkxNDIxIiwieC1kYXRhZG9nLXBhcmVudC1pZCI6IjY3NDY5OTgwMTUwMzc0Mjk1MTIiLCJ4LWRhdGFkb2ctc2FtcGxpbmctcHJpb3JpdHkiOiIxIn0=') + )).build()]).build() + def messages = new TracingList([message], + "http://localhost:${address.port}/000000000000/somequeue", + "00000000-0000-0000-0000-000000000000") + + messages.forEach {/* consume to create message spans */ } + + then: + assertTraces(1) { + trace(1) { + span { + serviceName expectedService("Sqs", "ReceiveMessage") + operationName expectedOperation("Sqs", "ReceiveMessage") + resourceName "Sqs.ReceiveMessage" + spanType DDSpanTypes.MESSAGE_CONSUMER + errored false + measured true + traceId(4948377316357291421 as BigInteger) + parentSpanId(6746998015037429512 as BigInteger) + tags { + "$Tags.COMPONENT" "java-aws-sdk" + "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER + "aws.service" "Sqs" + "aws_service" "Sqs" + "aws.operation" "ReceiveMessage" + "aws.agent" "java-aws-sdk" + "aws.queue.url" "http://localhost:${address.port}/000000000000/somequeue" + "aws.requestId" "00000000-0000-0000-0000-000000000000" + defaultTags(true) + } + } + } + } + } + @IgnoreIf({instance.isDataStreamsEnabled()}) def "trace details propagated from SQS to JMS"() { setup: @@ -311,7 +401,7 @@ abstract class SqsClientTest extends VersionedNamingTestBase { } trace(1) { span { - serviceName SpanNaming.instance().namingSchema().messaging().inboundService(Config.get().getServiceName(), "jms") + serviceName SpanNaming.instance().namingSchema().messaging().inboundService("jms", Config.get().isLegacyTracingEnabled(true, "jms")) ?: Config.get().getServiceName() operationName SpanNaming.instance().namingSchema().messaging().inboundOperation("jms") resourceName "Consumed from Queue somequeue" 
spanType DDSpanTypes.MESSAGE_CONSUMER @@ -386,13 +476,6 @@ class SqsClientV1ForkedTest extends SqsClientTest { } class SqsClientV0DataStreamsTest extends SqsClientTest { - - @Override - protected void configurePreAgent() { - super.configurePreAgent() - injectSysConfig("dd.data.streams.enabled", "true") - } - @Override String expectedOperation(String awsService, String awsOperation) { "aws.http" diff --git a/dd-java-agent/instrumentation/build.gradle b/dd-java-agent/instrumentation/build.gradle index 16a54bb5e3f..3847c8b9dd0 100644 --- a/dd-java-agent/instrumentation/build.gradle +++ b/dd-java-agent/instrumentation/build.gradle @@ -14,6 +14,7 @@ plugins { } apply from: "$rootDir/gradle/java.gradle" + tasks.register("latestDepTest") Project parent_project = project @@ -120,8 +121,23 @@ dependencies { implementation project(':dd-java-agent:agent-builder') } +if (project.gradle.startParameter.taskNames.any {it.endsWith("generateMuzzleReport")}) { + apply plugin: 'muzzle' + task("muzzleInstrumentationReport") { + dependsOn(project.getAllTasks(true).values().flatten().findAll { it.name.endsWith("generateMuzzleReport") }) + finalizedBy(tasks.named('mergeMuzzleReports')) + } +} + + tasks.named('shadowJar').configure { duplicatesStrategy = DuplicatesStrategy.FAIL + dependencies { + // the tracer is now in a separate shadow jar + exclude(project(":dd-trace-core")) + exclude(dependency('com.datadoghq:sketches-java')) + exclude(dependency('com.google.re2j:re2j')) + } dependencies deps.excludeShared } diff --git a/dd-java-agent/instrumentation/commons-httpclient-2/src/main/java/datadog/trace/instrumentation/commonshttpclient/IastHttpMethodBaseInstrumentation.java b/dd-java-agent/instrumentation/commons-httpclient-2/src/main/java/datadog/trace/instrumentation/commonshttpclient/IastHttpMethodBaseInstrumentation.java index 8371ad69442..11522f224fd 100644 --- a/dd-java-agent/instrumentation/commons-httpclient-2/src/main/java/datadog/trace/instrumentation/commonshttpclient/IastHttpMethodBaseInstrumentation.java +++ b/dd-java-agent/instrumentation/commons-httpclient-2/src/main/java/datadog/trace/instrumentation/commonshttpclient/IastHttpMethodBaseInstrumentation.java @@ -51,7 +51,7 @@ public static void afterCtor( @Advice.This final Object self, @Advice.Argument(0) final Object argument) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { - module.taintIfInputIsTainted(self, argument); + module.taintIfTainted(self, argument); } } } diff --git a/dd-java-agent/instrumentation/commons-httpclient-2/src/test/groovy/IastCommonsHttpClientInstrumentationTest.groovy b/dd-java-agent/instrumentation/commons-httpclient-2/src/test/groovy/IastCommonsHttpClientInstrumentationTest.groovy index 3624973f60e..0a3f5ba5bc7 100644 --- a/dd-java-agent/instrumentation/commons-httpclient-2/src/test/groovy/IastCommonsHttpClientInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/commons-httpclient-2/src/test/groovy/IastCommonsHttpClientInstrumentationTest.groovy @@ -51,7 +51,7 @@ class IastCommonsHttpClientInstrumentationTest extends AgentTestRunner { private void mockPropagation() { final propagation = Mock(PropagationModule) { - taintIfInputIsTainted(_, _) >> { + taintIfTainted(_, _) >> { if (tainteds.containsKey(it[1])) { tainteds.put(it[0], null) } diff --git a/dd-java-agent/instrumentation/commons-lang-2/src/main/java/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSite.java 
b/dd-java-agent/instrumentation/commons-lang-2/src/main/java/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSite.java index 63e71ac0325..d7fdd84417d 100644 --- a/dd-java-agent/instrumentation/commons-lang-2/src/main/java/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSite.java +++ b/dd-java-agent/instrumentation/commons-lang-2/src/main/java/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSite.java @@ -6,7 +6,7 @@ import datadog.trace.api.iast.Propagation; import datadog.trace.api.iast.VulnerabilityMarks; import datadog.trace.api.iast.propagation.PropagationModule; -import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Propagation @CallSite(spi = IastCallSites.class) @@ -21,11 +21,11 @@ public class StringEscapeUtilsCallSite { @CallSite.After( "java.lang.String org.apache.commons.lang.StringEscapeUtils.escapeXml(java.lang.String)") public static String afterEscape( - @CallSite.Argument(0) @Nonnull final String input, @CallSite.Return final String result) { + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { try { - module.taintIfInputIsTaintedWithMarks(result, input, VulnerabilityMarks.XSS_MARK); + module.taintIfTainted(result, input, false, VulnerabilityMarks.XSS_MARK); } catch (final Throwable e) { module.onUnexpectedException("afterEscape threw", e); } @@ -36,11 +36,11 @@ public static String afterEscape( @CallSite.After( "java.lang.String org.apache.commons.lang.StringEscapeUtils.escapeSql(java.lang.String)") public static String afterEscapeSQL( - @CallSite.Argument(0) @Nonnull final String input, @CallSite.Return final String result) { + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { try { - module.taintIfInputIsTaintedWithMarks(result, input, VulnerabilityMarks.SQL_INJECTION_MARK); + module.taintIfTainted(result, input, false, VulnerabilityMarks.SQL_INJECTION_MARK); } catch (final Throwable e) { module.onUnexpectedException("afterEscapeSQL threw", e); } diff --git a/dd-java-agent/instrumentation/commons-lang-2/src/test/groovy/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSiteTest.groovy b/dd-java-agent/instrumentation/commons-lang-2/src/test/groovy/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSiteTest.groovy index 7a4d9457619..348aaa57503 100644 --- a/dd-java-agent/instrumentation/commons-lang-2/src/test/groovy/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSiteTest.groovy +++ b/dd-java-agent/instrumentation/commons-lang-2/src/test/groovy/datadog/trace/instrumentation/commonslang/StringEscapeUtilsCallSiteTest.groovy @@ -27,7 +27,7 @@ class StringEscapeUtilsCallSiteTest extends AgentTestRunner { then: result == expected - 1 * module.taintIfInputIsTaintedWithMarks(_ as String, args[0], mark) + 1 * module.taintIfTainted(_ as String, args[0], false, mark) 0 * _ where: diff --git a/dd-java-agent/instrumentation/commons-lang-3/src/main/java/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSite.java b/dd-java-agent/instrumentation/commons-lang-3/src/main/java/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSite.java index e151128f760..b52f57c57e9 100644 --- 
a/dd-java-agent/instrumentation/commons-lang-3/src/main/java/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSite.java +++ b/dd-java-agent/instrumentation/commons-lang-3/src/main/java/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSite.java @@ -6,7 +6,7 @@ import datadog.trace.api.iast.Propagation; import datadog.trace.api.iast.VulnerabilityMarks; import datadog.trace.api.iast.propagation.PropagationModule; -import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Propagation @CallSite(spi = IastCallSites.class) @@ -23,11 +23,11 @@ public class StringEscapeUtilsCallSite { @CallSite.After( "java.lang.String org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript(java.lang.String)") public static String afterEscape( - @CallSite.Argument(0) @Nonnull final String input, @CallSite.Return final String result) { + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { try { - module.taintIfInputIsTaintedWithMarks(result, input, VulnerabilityMarks.XSS_MARK); + module.taintIfTainted(result, input, false, VulnerabilityMarks.XSS_MARK); } catch (final Throwable e) { module.onUnexpectedException("afterEscape threw", e); } @@ -38,11 +38,11 @@ public static String afterEscape( @CallSite.After( "java.lang.String org.apache.commons.lang3.StringEscapeUtils.escapeJson(java.lang.String)") public static String afterEscapeJson( - @CallSite.Argument(0) @Nonnull final String input, @CallSite.Return final String result) { + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { try { - module.taintIfInputIsTainted(result, input); + module.taintIfTainted(result, input); } catch (final Throwable e) { module.onUnexpectedException("afterEscapeJson threw", e); } diff --git a/dd-java-agent/instrumentation/commons-lang-3/src/test/groovy/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSiteTest.groovy b/dd-java-agent/instrumentation/commons-lang-3/src/test/groovy/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSiteTest.groovy index dc7147fe6e4..18730b70612 100644 --- a/dd-java-agent/instrumentation/commons-lang-3/src/test/groovy/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSiteTest.groovy +++ b/dd-java-agent/instrumentation/commons-lang-3/src/test/groovy/datadog/trace/instrumentation/commonslang3/StringEscapeUtilsCallSiteTest.groovy @@ -25,7 +25,7 @@ class StringEscapeUtilsCallSiteTest extends AgentTestRunner { then: result == expected - 1 * module.taintIfInputIsTaintedWithMarks(_ as String, args[0], VulnerabilityMarks.XSS_MARK) + 1 * module.taintIfTainted(_ as String, args[0], false, VulnerabilityMarks.XSS_MARK) 0 * _ where: @@ -47,7 +47,7 @@ class StringEscapeUtilsCallSiteTest extends AgentTestRunner { then: result == expected - 1 * module.taintIfInputIsTainted(_ as String, args[0]) + 1 * module.taintIfTainted(_ as String, args[0]) 0 * _ where: diff --git a/dd-java-agent/instrumentation/commons-text/src/main/java/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSite.java b/dd-java-agent/instrumentation/commons-text/src/main/java/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSite.java index 68620efd078..ebd592fa241 100644 --- 
a/dd-java-agent/instrumentation/commons-text/src/main/java/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSite.java +++ b/dd-java-agent/instrumentation/commons-text/src/main/java/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSite.java @@ -6,7 +6,7 @@ import datadog.trace.api.iast.Propagation; import datadog.trace.api.iast.VulnerabilityMarks; import datadog.trace.api.iast.propagation.PropagationModule; -import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Propagation @CallSite(spi = IastCallSites.class) @@ -25,11 +25,11 @@ public class StringEscapeUtilsCallSite { @CallSite.After( "java.lang.String org.apache.commons.text.StringEscapeUtils.escapeXml11(java.lang.String)") public static String afterEscape( - @CallSite.Argument(0) @Nonnull final String input, @CallSite.Return final String result) { + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { try { - module.taintIfInputIsTaintedWithMarks(result, input, VulnerabilityMarks.XSS_MARK); + module.taintIfTainted(result, input, false, VulnerabilityMarks.XSS_MARK); } catch (final Throwable e) { module.onUnexpectedException("afterEscape threw", e); } @@ -40,11 +40,11 @@ public static String afterEscape( @CallSite.After( "java.lang.String org.apache.commons.text.StringEscapeUtils.escapeJson(java.lang.String)") public static String afterEscapeJson( - @CallSite.Argument(0) @Nonnull final String input, @CallSite.Return final String result) { + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { final PropagationModule module = InstrumentationBridge.PROPAGATION; if (module != null) { try { - module.taintIfInputIsTainted(result, input); + module.taintIfTainted(result, input); } catch (final Throwable e) { module.onUnexpectedException("afterEscapeJson threw", e); } diff --git a/dd-java-agent/instrumentation/commons-text/src/test/groovy/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSiteTest.groovy b/dd-java-agent/instrumentation/commons-text/src/test/groovy/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSiteTest.groovy index 12296ad58c2..440e31ea3be 100644 --- a/dd-java-agent/instrumentation/commons-text/src/test/groovy/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSiteTest.groovy +++ b/dd-java-agent/instrumentation/commons-text/src/test/groovy/datadog/trace/instrumentation/commonstext/StringEscapeUtilsCallSiteTest.groovy @@ -25,7 +25,7 @@ class StringEscapeUtilsCallSiteTest extends AgentTestRunner { then: result == expected - 1 * module.taintIfInputIsTaintedWithMarks(_ as String, args[0], VulnerabilityMarks.XSS_MARK) + 1 * module.taintIfTainted(_ as String, args[0], false, VulnerabilityMarks.XSS_MARK) 0 * _ where: @@ -48,7 +48,7 @@ class StringEscapeUtilsCallSiteTest extends AgentTestRunner { then: result == expected - 1 * module.taintIfInputIsTainted(_ as String, args[0]) + 1 * module.taintIfTainted(_ as String, args[0]) 0 * _ where: diff --git a/dd-java-agent/instrumentation/couchbase/couchbase-2.0/src/main/java/datadog/trace/instrumentation/couchbase/client/CouchbaseClientDecorator.java b/dd-java-agent/instrumentation/couchbase/couchbase-2.0/src/main/java/datadog/trace/instrumentation/couchbase/client/CouchbaseClientDecorator.java index 7afffe39500..cd592761951 100644 --- 
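Editor's note on the IAST call-site changes above (commons-httpclient, commons-lang 2/3, commons-text): the old taintIfInputIsTainted / taintIfInputIsTaintedWithMarks pair is consolidated into a single taintIfTainted method, and the escaped argument is now accepted as @Nullable. A minimal sketch of the resulting advice shape for a hypothetical escaping helper follows; com.example.Escaper is made up for illustration, and the boolean flag is copied verbatim from the diff (its exact semantics are not shown in this change), while everything else mirrors the call sites above.

import datadog.trace.agent.tooling.csi.CallSite;
import datadog.trace.api.iast.IastCallSites;
import datadog.trace.api.iast.InstrumentationBridge;
import datadog.trace.api.iast.Propagation;
import datadog.trace.api.iast.VulnerabilityMarks;
import datadog.trace.api.iast.propagation.PropagationModule;
import javax.annotation.Nullable;

@Propagation
@CallSite(spi = IastCallSites.class)
public class ExampleEscaperCallSite { // hypothetical target class, for illustration only
  @CallSite.After("java.lang.String com.example.Escaper.escapeHtml(java.lang.String)")
  public static String afterEscape(
      @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) {
    final PropagationModule module = InstrumentationBridge.PROPAGATION;
    if (module != null) {
      try {
        // Taint the escaped result if the input was tainted; the XSS mark records that the
        // value has been escaped for XSS purposes, matching the call sites in this change.
        module.taintIfTainted(result, input, false, VulnerabilityMarks.XSS_MARK);
      } catch (final Throwable e) {
        module.onUnexpectedException("afterEscape threw", e);
      }
    }
    return result;
  }
}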
a/dd-java-agent/instrumentation/couchbase/couchbase-2.0/src/main/java/datadog/trace/instrumentation/couchbase/client/CouchbaseClientDecorator.java +++ b/dd-java-agent/instrumentation/couchbase/couchbase-2.0/src/main/java/datadog/trace/instrumentation/couchbase/client/CouchbaseClientDecorator.java @@ -1,6 +1,5 @@ package datadog.trace.instrumentation.couchbase.client; -import datadog.trace.api.Config; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; @@ -11,10 +10,7 @@ class CouchbaseClientDecorator extends DBTypeProcessingDatabaseClientDecorator { private static final String DB_TYPE = "couchbase"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final CharSequence OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); diff --git a/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/main/java/datadog/trace/instrumentation/couchbase_31/client/CouchbaseClientDecorator.java b/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/main/java/datadog/trace/instrumentation/couchbase_31/client/CouchbaseClientDecorator.java index 8d697120966..a1e62890976 100644 --- a/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/main/java/datadog/trace/instrumentation/couchbase_31/client/CouchbaseClientDecorator.java +++ b/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/main/java/datadog/trace/instrumentation/couchbase_31/client/CouchbaseClientDecorator.java @@ -2,7 +2,6 @@ import static datadog.trace.bootstrap.instrumentation.api.Tags.DB_TYPE; -import datadog.trace.api.Config; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; @@ -11,10 +10,7 @@ class CouchbaseClientDecorator extends DBTypeProcessingDatabaseClientDecorator { private static final String DB_TYPE = "couchbase"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final CharSequence OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); public static final CharSequence COUCHBASE_CLIENT = UTF8BytesString.create("couchbase-client"); diff --git a/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/main/java/datadog/trace/instrumentation/couchbase_32/client/CouchbaseClientDecorator.java b/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/main/java/datadog/trace/instrumentation/couchbase_32/client/CouchbaseClientDecorator.java index 854a67b6c75..2b84ad23c07 100644 --- a/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/main/java/datadog/trace/instrumentation/couchbase_32/client/CouchbaseClientDecorator.java +++ b/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/main/java/datadog/trace/instrumentation/couchbase_32/client/CouchbaseClientDecorator.java @@ -2,7 +2,6 @@ import static datadog.trace.bootstrap.instrumentation.api.Tags.DB_TYPE; -import datadog.trace.api.Config; import datadog.trace.api.naming.SpanNaming; import 
datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; @@ -11,10 +10,7 @@ class CouchbaseClientDecorator extends DBTypeProcessingDatabaseClientDecorator { private static final String DB_TYPE = "couchbase"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final CharSequence OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); public static final CharSequence COUCHBASE_CLIENT = UTF8BytesString.create("couchbase-client"); diff --git a/dd-java-agent/instrumentation/cucumber/build.gradle b/dd-java-agent/instrumentation/cucumber/build.gradle index 1b5f78a252d..0f8f883d533 100644 --- a/dd-java-agent/instrumentation/cucumber/build.gradle +++ b/dd-java-agent/instrumentation/cucumber/build.gradle @@ -8,31 +8,18 @@ muzzle { } } +addTestSuiteForDir('latestDepTest', 'test') + dependencies { compileOnly group: 'io.cucumber', name: 'cucumber-core', version: '5.4.0' - testImplementation(group: 'org.junit.platform', name: 'junit-platform-launcher') { - version { - strictly '1.8.2' - } - } - testImplementation(group: 'org.junit.platform', name: 'junit-platform-engine') { - version { - strictly '1.8.2' - } - } - testImplementation(group: 'org.junit.platform', name: 'junit-platform-suite') { - version { - strictly '1.8.2' - } - } - testImplementation(group: 'org.junit.platform', name: 'junit-platform-suite-engine') { - version { - strictly '1.8.2' - } - } + testImplementation group: 'org.junit.platform', name: 'junit-platform-launcher', version: '1.9.2' + testImplementation group: 'org.junit.platform', name: 'junit-platform-suite', version: '1.9.2' + testImplementation group: 'org.junit.platform', name: 'junit-platform-suite-engine', version: '1.9.2' - testImplementation group: 'io.cucumber', name: 'cucumber-java', version: '5.4.0' testImplementation group: 'io.cucumber', name: 'cucumber-junit-platform-engine', version: '5.4.0' - testImplementation group: 'io.cucumber', name: 'cucumber-junit', version: '5.4.0' + testImplementation group: 'io.cucumber', name: 'cucumber-java', version: '5.4.0' + + latestDepTestImplementation group: 'io.cucumber', name: 'cucumber-java', version: '+' + latestDepTestImplementation group: 'io.cucumber', name: 'cucumber-junit-platform-engine', version: '+' } diff --git a/dd-java-agent/instrumentation/datastax-cassandra-3/src/main/java/datadog/trace/instrumentation/datastax/cassandra/CassandraClientDecorator.java b/dd-java-agent/instrumentation/datastax-cassandra-3/src/main/java/datadog/trace/instrumentation/datastax/cassandra/CassandraClientDecorator.java index 67ce02a29d1..28d5b32eddf 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-3/src/main/java/datadog/trace/instrumentation/datastax/cassandra/CassandraClientDecorator.java +++ b/dd-java-agent/instrumentation/datastax-cassandra-3/src/main/java/datadog/trace/instrumentation/datastax/cassandra/CassandraClientDecorator.java @@ -3,7 +3,6 @@ import com.datastax.driver.core.Host; import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.Session; -import datadog.trace.api.Config; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -13,10 +12,7 @@ public class 
CassandraClientDecorator extends DBTypeProcessingDatabaseClientDecorator { private static final String DB_TYPE = "cassandra"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final CharSequence OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); public static final CharSequence JAVA_CASSANDRA = UTF8BytesString.create("java-cassandra"); diff --git a/dd-java-agent/instrumentation/datastax-cassandra-4/src/main/java/datadog/trace/instrumentation/datastax/cassandra4/CassandraClientDecorator.java b/dd-java-agent/instrumentation/datastax-cassandra-4/src/main/java/datadog/trace/instrumentation/datastax/cassandra4/CassandraClientDecorator.java index e453f28ed35..98817ca2b5d 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-4/src/main/java/datadog/trace/instrumentation/datastax/cassandra4/CassandraClientDecorator.java +++ b/dd-java-agent/instrumentation/datastax-cassandra-4/src/main/java/datadog/trace/instrumentation/datastax/cassandra4/CassandraClientDecorator.java @@ -5,7 +5,6 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.session.Session; -import datadog.trace.api.Config; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -18,10 +17,7 @@ public class CassandraClientDecorator extends DBTypeProcessingDatabaseClientDecorator { private static final String DB_TYPE = "cassandra"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final CharSequence OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); public static final CharSequence JAVA_CASSANDRA = UTF8BytesString.create("java-cassandra"); diff --git a/dd-java-agent/instrumentation/elasticsearch/rest-5/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy b/dd-java-agent/instrumentation/elasticsearch/rest-5/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy index eb646f790a8..012a4aa4c30 100644 --- a/dd-java-agent/instrumentation/elasticsearch/rest-5/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy +++ b/dd-java-agent/instrumentation/elasticsearch/rest-5/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy @@ -96,14 +96,16 @@ class Elasticsearch6RestClientTest extends AgentTestRunner { } span { serviceName "elasticsearch" - resourceName "GET _cluster/health" + resourceName "GET /_cluster/health" operationName "http.request" spanType DDSpanTypes.HTTP_CLIENT childOf span(0) tags { "$Tags.COMPONENT" "apache-httpasyncclient" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.HTTP_URL" "_cluster/health" + "$Tags.PEER_HOSTNAME" httpTransportAddress.address + "$Tags.PEER_PORT" httpTransportAddress.port + "$Tags.HTTP_URL" "http://${httpTransportAddress.address}:${httpTransportAddress.port}/_cluster/health" "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 200 defaultTags() diff --git a/dd-java-agent/instrumentation/elasticsearch/rest-5/src/test/groovy/Elasticsearch5RestClientTest.groovy 
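Editor's note on the decorator changes above (Couchbase 2.0/3.1/3.2, Cassandra 3/4, and Elasticsearch further below): the naming schema's database().service(...) no longer takes the configured service name, only the database type. A minimal sketch of the resulting decorator constants, using a made-up database type — only the SpanNaming and UTF8BytesString calls are taken from the diff:

import datadog.trace.api.naming.SpanNaming;
import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString;

class ExampleDbClientDecorator {
  private static final String DB_TYPE = "exampledb"; // hypothetical database type

  // Service name now derived from the database type alone; the decorator no longer reads
  // Config.get().getServiceName() itself.
  private static final String SERVICE_NAME =
      SpanNaming.instance().namingSchema().database().service(DB_TYPE);

  static final CharSequence OPERATION_NAME =
      UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE));
}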
b/dd-java-agent/instrumentation/elasticsearch/rest-5/src/test/groovy/Elasticsearch5RestClientTest.groovy index 8cdfcdb82ba..5d971c1f028 100644 --- a/dd-java-agent/instrumentation/elasticsearch/rest-5/src/test/groovy/Elasticsearch5RestClientTest.groovy +++ b/dd-java-agent/instrumentation/elasticsearch/rest-5/src/test/groovy/Elasticsearch5RestClientTest.groovy @@ -104,17 +104,20 @@ abstract class Elasticsearch5RestClientTest extends VersionedNamingTestBase { } span { serviceName service() - resourceName "GET _cluster/health" + resourceName "GET /_cluster/health" operationName SpanNaming.instance().namingSchema().client().operationForComponent("apache-httpasyncclient") spanType DDSpanTypes.HTTP_CLIENT childOf span(0) tags { "$Tags.COMPONENT" "apache-httpasyncclient" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.HTTP_URL" "_cluster/health" + "$Tags.PEER_HOSTNAME" httpTransportAddress.address + "$Tags.PEER_PORT" httpTransportAddress.port + "$Tags.HTTP_URL" "http://${httpTransportAddress.address}:${httpTransportAddress.port}/_cluster/health" "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 200 - defaultTagsNoPeerService() + peerServiceFrom(Tags.PEER_HOSTNAME) + defaultTags() } } } diff --git a/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy b/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy index fdd68212459..72fbcc66a21 100644 --- a/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy +++ b/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/latestDepTest/groovy/Elasticsearch6RestClientTest.groovy @@ -6,7 +6,6 @@ import groovy.json.JsonSlurper import org.apache.http.HttpHost import org.apache.http.client.config.RequestConfig import org.apache.http.util.EntityUtils -import org.elasticsearch.client.Request import org.elasticsearch.client.Response import org.elasticsearch.client.RestClient import org.elasticsearch.client.RestClientBuilder @@ -71,9 +70,9 @@ class Elasticsearch6RestClientTest extends AgentTestRunner { def "test elasticsearch status"() { setup: + injectSysConfig("httpasyncclient4.legacy.tracing.enabled", "true") - Request request = new Request("GET", "_cluster/health") - Response response = client.performRequest(request) + Response response = client.performRequest("GET", "_cluster/health") Map result = new JsonSlurper().parseText(EntityUtils.toString(response.entity)) @@ -111,7 +110,7 @@ class Elasticsearch6RestClientTest extends AgentTestRunner { "$Tags.HTTP_URL" "_cluster/health" "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 200 - defaultTags() + defaultTagsNoPeerService() } } } diff --git a/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/test/groovy/Elasticsearch6RestClientTest.groovy b/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/test/groovy/Elasticsearch6RestClientTest.groovy index eb646f790a8..bc95893a81a 100644 --- a/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/test/groovy/Elasticsearch6RestClientTest.groovy +++ b/dd-java-agent/instrumentation/elasticsearch/rest-6.4/src/test/groovy/Elasticsearch6RestClientTest.groovy @@ -33,7 +33,6 @@ class Elasticsearch6RestClientTest extends AgentTestRunner { RestClient client def setupSpec() { - esWorkingDir = File.createTempDir("test-es-working-dir-", "") esWorkingDir.deleteOnExit() println "ES work dir: $esWorkingDir" @@ -67,6 +66,7 @@ class Elasticsearch6RestClientTest extends AgentTestRunner { def "test 
elasticsearch status"() { setup: + injectSysConfig("httpasyncclient4.legacy.tracing.enabled", "true") Response response = client.performRequest("GET", "_cluster/health") @@ -106,7 +106,7 @@ class Elasticsearch6RestClientTest extends AgentTestRunner { "$Tags.HTTP_URL" "_cluster/health" "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 200 - defaultTags() + defaultTagsNoPeerService() } } } diff --git a/dd-java-agent/instrumentation/elasticsearch/rest-7/src/test/groovy/Elasticsearch7RestClientTest.groovy b/dd-java-agent/instrumentation/elasticsearch/rest-7/src/test/groovy/Elasticsearch7RestClientTest.groovy index 346cb12c567..e1f527a01f9 100644 --- a/dd-java-agent/instrumentation/elasticsearch/rest-7/src/test/groovy/Elasticsearch7RestClientTest.groovy +++ b/dd-java-agent/instrumentation/elasticsearch/rest-7/src/test/groovy/Elasticsearch7RestClientTest.groovy @@ -132,14 +132,16 @@ class Elasticsearch7RestClientTest extends AgentTestRunner { } span { serviceName "elasticsearch" - resourceName "GET _cluster/health" + resourceName "GET /_cluster/health" operationName "http.request" spanType DDSpanTypes.HTTP_CLIENT childOf span(0) tags { "$Tags.COMPONENT" "apache-httpasyncclient" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.HTTP_URL" "_cluster/health" + "$Tags.PEER_HOSTNAME" httpTransportAddress.address + "$Tags.PEER_PORT" httpTransportAddress.port + "$Tags.HTTP_URL" "http://${httpTransportAddress.address}:${httpTransportAddress.port}/_cluster/health" "$Tags.HTTP_METHOD" "GET" "$Tags.HTTP_STATUS" 200 defaultTags() diff --git a/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchRestClientDecorator.java b/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchRestClientDecorator.java index ac9750c4a8e..0741e14a39e 100644 --- a/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchRestClientDecorator.java +++ b/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchRestClientDecorator.java @@ -21,10 +21,7 @@ public class ElasticsearchRestClientDecorator extends DBTypeProcessingDatabaseCl private static final int MAX_ELASTICSEARCH_BODY_CONTENT_LENGTH = 25000; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), "elasticsearch"); + SpanNaming.instance().namingSchema().database().service("elasticsearch"); public static final CharSequence OPERATION_NAME = UTF8BytesString.create( @@ -97,7 +94,9 @@ public AgentSpan onRequest( final Map parameters) { span.setTag(Tags.HTTP_METHOD, method); span.setTag(Tags.HTTP_URL, endpoint); - if (Config.get().isElasticsearchBodyAndParamsEnabled()) { + + final Config config = Config.get(); + if (config.isElasticsearchBodyEnabled() || config.isElasticsearchBodyAndParamsEnabled()) { if (entity != null) { long contentLength = entity.getContentLength(); if (contentLength <= MAX_ELASTICSEARCH_BODY_CONTENT_LENGTH) { @@ -112,6 +111,9 @@ public AgentSpan onRequest( + ">"); } } + } + + if (config.isElasticsearchParamsEnabled() || config.isElasticsearchBodyAndParamsEnabled()) { if (parameters != null) { StringBuilder queryParametersStringBuilder = new StringBuilder(); for (Map.Entry parameter : parameters.entrySet()) { diff --git 
a/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchTransportClientDecorator.java b/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchTransportClientDecorator.java index 590c0edc6a1..172a7a56ef8 100644 --- a/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchTransportClientDecorator.java +++ b/dd-java-agent/instrumentation/elasticsearch/src/main/java/datadog/trace/instrumentation/elasticsearch/ElasticsearchTransportClientDecorator.java @@ -1,6 +1,5 @@ package datadog.trace.instrumentation.elasticsearch; -import datadog.trace.api.Config; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -11,10 +10,7 @@ public class ElasticsearchTransportClientDecorator extends DBTypeProcessingDatab private static final String DB_TYPE = "elasticsearch"; private static final String SERVICE_NAME = - SpanNaming.instance() - .namingSchema() - .database() - .service(Config.get().getServiceName(), DB_TYPE); + SpanNaming.instance().namingSchema().database().service(DB_TYPE); public static final CharSequence OPERATION_NAME = UTF8BytesString.create(SpanNaming.instance().namingSchema().database().operation(DB_TYPE)); diff --git a/dd-java-agent/instrumentation/exception-profiling/src/main/java/datadog/exceptions/instrumentation/ThrowableInstrumentation.java b/dd-java-agent/instrumentation/exception-profiling/src/main/java/datadog/exceptions/instrumentation/ThrowableInstrumentation.java index 08a9b11997c..8f37f033e23 100644 --- a/dd-java-agent/instrumentation/exception-profiling/src/main/java/datadog/exceptions/instrumentation/ThrowableInstrumentation.java +++ b/dd-java-agent/instrumentation/exception-profiling/src/main/java/datadog/exceptions/instrumentation/ThrowableInstrumentation.java @@ -1,21 +1,15 @@ package datadog.exceptions.instrumentation; -import static datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers.declaresField; -import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; import static net.bytebuddy.matcher.ElementMatchers.isConstructor; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.api.Platform; -import net.bytebuddy.description.type.TypeDescription; -import net.bytebuddy.matcher.ElementMatcher; -/** Provides instrumentation of {@linkplain Throwable} constructor. */ +/** Provides instrumentation of {@linkplain Exception} and {@linkplain Error} constructors. 
*/ @AutoService(Instrumenter.class) public final class ThrowableInstrumentation extends Instrumenter.Profiling - implements Instrumenter.ForBootstrap, - Instrumenter.ForSingleType, - Instrumenter.WithTypeStructure { + implements Instrumenter.ForBootstrap, Instrumenter.ForKnownTypes { public ThrowableInstrumentation() { super("throwables"); @@ -27,17 +21,14 @@ public boolean isEnabled() { } @Override - public String instrumentedType() { - return "java.lang.Throwable"; - } - - @Override - public ElementMatcher structureMatcher() { - return declaresField(named("stackTrace")); + public void adviceTransformations(AdviceTransformation transformation) { + transformation.applyAdvice(isConstructor(), packageName + ".ThrowableInstanceAdvice"); } @Override - public void adviceTransformations(AdviceTransformation transformation) { - transformation.applyAdvice(isConstructor(), packageName + ".ThrowableInstanceAdvice"); + public String[] knownMatchingTypes() { + return new String[] { + "java.lang.Exception", "java.lang.Error", "kotlin.Exception", "kotlin.Error" + }; } } diff --git a/dd-java-agent/instrumentation/exception-profiling/src/main/java11/datadog/exceptions/instrumentation/ThrowableInstanceAdvice.java b/dd-java-agent/instrumentation/exception-profiling/src/main/java11/datadog/exceptions/instrumentation/ThrowableInstanceAdvice.java index 32072e031ed..3c2623f5a48 100644 --- a/dd-java-agent/instrumentation/exception-profiling/src/main/java11/datadog/exceptions/instrumentation/ThrowableInstanceAdvice.java +++ b/dd-java-agent/instrumentation/exception-profiling/src/main/java11/datadog/exceptions/instrumentation/ThrowableInstanceAdvice.java @@ -11,12 +11,7 @@ public class ThrowableInstanceAdvice { @Advice.OnMethodExit(suppress = Throwable.class) - public static void onExit( - @Advice.This final Throwable t, - @Advice.FieldValue("stackTrace") StackTraceElement[] stackTrace) { - if (t.getClass().getName().endsWith(".ResourceLeakDetector$TraceRecord")) { - return; - } + public static void onExit(@Advice.This final Object t) { /* * This instrumentation handler is sensitive to any throwables thrown from its body - * it will go into infinite loop of trying to handle the new throwable instance and generating @@ -31,24 +26,23 @@ public static void onExit( } try { /* - * Exclude internal agent threads from exception profiling. + * We may get into a situation when this is called before exception sampling is active. */ - if (Config.get().isProfilingExcludeAgentThreads() - && AGENT_THREAD_GROUP.equals(Thread.currentThread().getThreadGroup())) { + if (!InstrumentationBasedProfiling.isJFRReady()) { return; } /* - * We may get into a situation when this is called before exception sampling is active. + * Exclude internal agent threads from exception profiling. */ - if (!InstrumentationBasedProfiling.isJFRReady()) { + if (Config.get().isProfilingExcludeAgentThreads() + && AGENT_THREAD_GROUP.equals(Thread.currentThread().getThreadGroup())) { return; } /* * JFR will assign the stacktrace depending on the place where the event is committed. * Therefore we need to commit the event here, right in the 'Exception' constructor */ - final ExceptionSampleEvent event = - ExceptionProfiling.getInstance().process(t, stackTrace == null ? 
0 : stackTrace.length); + final ExceptionSampleEvent event = ExceptionProfiling.getInstance().process((Throwable) t); if (event != null && event.shouldCommit()) { event.commit(); } diff --git a/dd-java-agent/instrumentation/freemarker/build.gradle b/dd-java-agent/instrumentation/freemarker/build.gradle new file mode 100644 index 00000000000..f2d5ef612aa --- /dev/null +++ b/dd-java-agent/instrumentation/freemarker/build.gradle @@ -0,0 +1,23 @@ +muzzle { + pass { + group = 'org.freemarker' + module = 'freemarker' + versions = '[2.3.32,]' + assertInverse = true + } +} + +apply from: "$rootDir/gradle/java.gradle" +apply plugin: 'call-site-instrumentation' + +addTestSuiteForDir('latestDepTest', 'test') + +dependencies { + compileOnly group: 'org.freemarker', name: 'freemarker', version: '2.3.32' + + testImplementation group: 'org.freemarker', name: 'freemarker', version: '2.3.32' + + testRuntimeOnly project(':dd-java-agent:instrumentation:iast-instrumenter') + + latestDepTestImplementation group: 'org.freemarker', name: 'freemarker', version: '+' +} diff --git a/dd-java-agent/instrumentation/freemarker/src/main/java/datadog/trace/instrumentation/freemarker/StringUtilCallSite.java b/dd-java-agent/instrumentation/freemarker/src/main/java/datadog/trace/instrumentation/freemarker/StringUtilCallSite.java new file mode 100644 index 00000000000..391339f528b --- /dev/null +++ b/dd-java-agent/instrumentation/freemarker/src/main/java/datadog/trace/instrumentation/freemarker/StringUtilCallSite.java @@ -0,0 +1,39 @@ +package datadog.trace.instrumentation.freemarker; + +import datadog.trace.agent.tooling.csi.CallSite; +import datadog.trace.api.iast.IastCallSites; +import datadog.trace.api.iast.InstrumentationBridge; +import datadog.trace.api.iast.Propagation; +import datadog.trace.api.iast.VulnerabilityMarks; +import datadog.trace.api.iast.propagation.PropagationModule; +import javax.annotation.Nullable; + +@Propagation +@CallSite(spi = IastCallSites.class) +public class StringUtilCallSite { + + @CallSite.After( + "java.lang.String freemarker.template.utility.StringUtil.HTMLEnc(java.lang.String)") + @CallSite.After( + "java.lang.String freemarker.template.utility.StringUtil.XMLEnc(java.lang.String)") + @CallSite.After( + "java.lang.String freemarker.template.utility.StringUtil.XHTMLEnc(java.lang.String)") + @CallSite.After( + "java.lang.String freemarker.template.utility.StringUtil.javaStringEnc(java.lang.String)") + @CallSite.After( + "java.lang.String freemarker.template.utility.StringUtil.javaScriptStringEnc(java.lang.String)") + @CallSite.After( + "java.lang.String freemarker.template.utility.StringUtil.jsonStringEnc(java.lang.String)") + public static String afterEscape( + @CallSite.Argument(0) @Nullable final String input, @CallSite.Return final String result) { + final PropagationModule module = InstrumentationBridge.PROPAGATION; + if (module != null) { + try { + module.taintIfTainted(result, input, false, VulnerabilityMarks.XSS_MARK); + } catch (final Throwable e) { + module.onUnexpectedException("afterEscape threw", e); + } + } + return result; + } +} diff --git a/dd-java-agent/instrumentation/freemarker/src/test/groovy/datadog/trace/instrumentation/freemarker/StringUtilCallSiteTest.groovy b/dd-java-agent/instrumentation/freemarker/src/test/groovy/datadog/trace/instrumentation/freemarker/StringUtilCallSiteTest.groovy new file mode 100644 index 00000000000..18d86d6aef5 --- /dev/null +++ 
b/dd-java-agent/instrumentation/freemarker/src/test/groovy/datadog/trace/instrumentation/freemarker/StringUtilCallSiteTest.groovy @@ -0,0 +1,61 @@ +package datadog.trace.instrumentation.freemarker + +import datadog.trace.agent.test.AgentTestRunner +import datadog.trace.api.iast.InstrumentationBridge +import datadog.trace.api.iast.VulnerabilityMarks +import datadog.trace.api.iast.propagation.PropagationModule +import foo.bar.TestStringUtilSuite + +class StringUtilCallSiteTest extends AgentTestRunner { + + @Override + protected void configurePreAgent() { + injectSysConfig("dd.iast.enabled", "true") + } + + void 'test #method'() { + given: + final module = Mock(PropagationModule) + InstrumentationBridge.registerIastModule(module) + + when: + final result = TestStringUtilSuite.&"$method".call(args) + + then: + result == expected + 1 * module.taintIfTainted(_ as String, args[0], false, VulnerabilityMarks.XSS_MARK) + 0 * _ + + where: + method | args | expected + 'HTMLEnc' | ['"escape this < '] | '<htmlTag>"escape this < </htmlTag>' + 'XMLEnc' | ['"escape this < '] | '<xmlTag>"escape this < </xmlTag>' + 'XHTMLEnc' | ['"escape this < '] | '<htmlTag>"escape this < </htmlTag>' + 'javaStringEnc' | ['
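Editor's note on the new FreeMarker instrumentation above: freemarker.template.utility.StringUtil provides the escaping helpers that FreeMarker's escaping built-ins (?html, ?js_string, and similar) rely on, so tainting their output with the XSS mark lets IAST recognize template escaping as sanitization. A quick standalone usage sketch of two of the instrumented methods; the expected outputs in the comments are indicative and should be verified against the FreeMarker version in use.

import freemarker.template.utility.StringUtil;

public class StringUtilDemo {
  public static void main(String[] args) {
    // HTMLEnc escapes &, <, > and double quotes
    System.out.println(StringUtil.HTMLEnc("\"escape this < "));   // &quot;escape this &lt;
    // jsonStringEnc escapes characters that would break a JSON string literal
    System.out.println(StringUtil.jsonStringEnc("line\"break"));  // line\"break
  }
}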