diff --git a/.buildkite/logstash_artifact_scan_pipeline.yml b/.buildkite/logstash_artifact_scan_pipeline.yml
new file mode 100644
index 00000000000..3a35365189a
--- /dev/null
+++ b/.buildkite/logstash_artifact_scan_pipeline.yml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json
+
+steps:
+  - label: ":pipeline: Logstash Artifact Scan"
+    command: |
+      set -euo pipefail
+      python3 -m pip install pyyaml requests
+      python3 .buildkite/scripts/snyk/artifact-scan/generate-steps.py > steps.yml
+      buildkite-agent pipeline upload < steps.yml
diff --git a/.buildkite/scripts/snyk/artifact-scan/Gemfile b/.buildkite/scripts/snyk/artifact-scan/Gemfile
new file mode 100644
index 00000000000..7318b99ea67
--- /dev/null
+++ b/.buildkite/scripts/snyk/artifact-scan/Gemfile
@@ -0,0 +1,3 @@
+source 'https://rubygems.org'
+
+gem 'rubyzip', '~> 2.3'
diff --git a/.buildkite/scripts/snyk/artifact-scan/extract_versions.rb b/.buildkite/scripts/snyk/artifact-scan/extract_versions.rb
new file mode 100755
index 00000000000..c4a0da74d3b
--- /dev/null
+++ b/.buildkite/scripts/snyk/artifact-scan/extract_versions.rb
@@ -0,0 +1,479 @@
+#!/usr/bin/env ruby
+
+require 'csv'
+require 'json'
+require 'securerandom'
+require 'time'
+require 'zip'
+
+# Extracts version info from JARs and gemspecs with confidence scoring
+#
+# Usage: bundle exec ruby extract_versions.rb <artifact_dir> [output.csv]
+
+def generate_sbom(results, output_file)
+  components = []
+
+  results.each do |r|
+    next if r[:version] == 'unknown' || r[:normalized_version] == 'unknown'
+
+    component = { type: 'library', name: r[:name], version: r[:version] }
+
+    # Generate PURL based on type
+    if r[:type] == 'gem'
+      component[:purl] = "pkg:gem/#{r[:name]}@#{r[:version]}"
+    else
+      if r[:group_id] && r[:artifact_id]
+        group, artifact = r[:group_id], r[:artifact_id]
+      elsif r[:name].include?(':')
+        group, artifact = r[:name].split(':', 2)
+      else
+        group, artifact = r[:name], r[:name]
+ 
end
+      component[:purl] = "pkg:maven/#{group}/#{artifact}@#{r[:version]}"
+      component[:group] = group
+      component[:name] = artifact
+    end
+
+    # Add evidence of location
+    component[:properties] = [
+      { name: 'filepath', value: r[:filepath] },
+      { name: 'confidence', value: r[:confidence] },
+      { name: 'sources', value: r[:sources] }
+    ]
+
+    components << component
+  end
+
+  sbom = {
+    bomFormat: 'CycloneDX',
+    specVersion: '1.5',
+    serialNumber: "urn:uuid:#{SecureRandom.uuid}",
+    version: 1,
+    metadata: {
+      timestamp: Time.now.utc.iso8601,
+      tools: [
+        {
+          vendor: 'custom',
+          name: 'extract_versions.rb',
+          version: '1.0.0'
+        }
+      ],
+      component: {
+        type: 'application',
+        name: 'logstash',
+        version: 'extracted'
+      }
+    },
+    components: components
+  }
+
+  File.write(output_file, JSON.pretty_generate(sbom))
+end
+
+class VersionExtractor
+  MANIFEST_VERSION_KEYS = %w[
+    Implementation-Version
+    Bundle-Version
+    Specification-Version
+  ].freeze
+
+  # Classifiers/suffixes to strip for normalization.
+  # These are regex fragments interpolated into /#{suffix}$/i below, so
+  # literal dots must be escaped (unescaped '.jre' would match any char + "jre",
+  # e.g. wrongly stripping "0GA" from "1.0.0GA"). %w preserves backslashes
+  # before non-space characters, as the existing \d entries already rely on.
+  VERSION_SUFFIXES = %w[
+    -jre \.jre -java -GA \.GA -SNAPSHOT -Final \.Final
+    -release-\d+ -incubating -alpha -beta -rc\d*
+  ].freeze
+
+  def initialize(root_dir)
+    @root_dir = root_dir
+  end
+
+  def extract_all
+    results = []
+    results.concat(extract_jars)
+    results.concat(extract_gems)
+    results
+  end
+
+  private
+
+  # Normalize version for comparison
+  # "33.1.0-jre" -> "33.1.0", "2.2" -> "2.2.0", "3.24.0.GA" -> "3.24.0"
+  def normalize_version(v)
+    return nil if v.nil? || v.empty?
+
+    normalized = v.dup
+
+    # Strip known suffixes
+    VERSION_SUFFIXES.each do |suffix|
+      normalized.gsub!(/#{suffix}$/i, '')
+    end
+
+    # Normalize x.y to x.y.0
+    if normalized =~ /^\d+\.\d+$/
+      normalized = "#{normalized}.0"
+    end
+
+    # Handle Derby format: 10.15.2000001.??? -> 10.15.2.1
+    # Derby encodes a.b.c.d as a.b.(c*1000000 + d).???
+ if normalized =~ /^(\d+)\.(\d+)\.(\d+)\.\?\?\?$/ + major, minor, encoded = $1, $2, $3.to_i + patch = encoded / 1000000 + build = encoded % 1000000 + if build > 0 + normalized = "#{major}.#{minor}.#{patch}.#{build}" + else + normalized = "#{major}.#{minor}.#{patch}.0" + end + end + + normalized + end + + # --- JAR extraction --- + + def extract_jars + results = [] + jar_files = Dir.glob(File.join(@root_dir, '**', '*.jar')) + + jar_files.each do |jar_path| + jar_results = extract_jar_versions(jar_path) + results.concat(jar_results) + end + + results + end + + def extract_jar_versions(jar_path) + sources = {} + pom_entries = [] + + # Parse filename + filename_info = parse_jar_filename(File.basename(jar_path)) + if filename_info[:version] + sources[:filename] = filename_info[:version] + end + + begin + Zip::File.open(jar_path) do |zip| + # Find all pom.properties + zip.each do |entry| + if entry.name =~ %r{META-INF/maven/(.+)/(.+)/pom\.properties$} + group_id, artifact_id = $1, $2 + content = entry.get_input_stream.read + version = parse_pom_properties(content)[:version] + if version + pom_entries << { group_id: group_id, artifact_id: artifact_id, version: version } + end + end + end + + # Get MANIFEST.MF + manifest_entry = zip.find_entry('META-INF/MANIFEST.MF') + if manifest_entry + content = manifest_entry.get_input_stream.read + manifest_version = parse_manifest(content) + sources[:manifest] = manifest_version if manifest_version + end + end + rescue Zip::Error => e + sources[:error] = e.message + rescue => e + sources[:error] = e.message + end + + relative_path = relative(jar_path) + + # If no version found, try to infer from parent gem directory + if sources.empty? 
|| (sources.keys == [:error]) + gem_version = infer_version_from_gem_path(jar_path) + sources[:inferred_from_gem_path] = gem_version if gem_version + end + + # If multiple pom.properties, treat as shaded JAR - report each bundled dep + if pom_entries.size > 1 + results = [] + + # Report the main JAR itself + main_name = filename_info[:name] || File.basename(jar_path, '.jar') + main_result = build_result( + type: 'jar', + name: main_name, + sources: sources, + filepath: relative_path + ) + results << main_result + + # Report each shaded dependency + pom_entries.each do |pom| + shaded_sources = { + "pom.properties[#{pom[:group_id]}:#{pom[:artifact_id]}]" => pom[:version] + } + results << build_result( + type: 'jar-shaded', + name: "#{pom[:group_id]}:#{pom[:artifact_id]}", + sources: shaded_sources, + filepath: relative_path + ) + end + + results + elsif pom_entries.size == 1 + pom = pom_entries.first + jar_basename = File.basename(jar_path, '.jar').downcase + pom_artifact = pom[:artifact_id].downcase + + # Check if the pom.properties artifact matches the JAR name + # If not, it's likely a shaded/bundled dependency, not the main artifact + # Example: xalan-2.7.3.jar contains org.apache.bcel:bcel pom.properties + if jar_basename.include?(pom_artifact) || pom_artifact.include?(jar_basename.split('-').first) + sources[:pom_properties] = pom[:version] + name = filename_info[:name] || pom[:artifact_id] + [build_result(type: 'jar', name: name, sources: sources, filepath: relative_path, group_id: pom[:group_id], artifact_id: pom[:artifact_id])] + else + # pom.properties doesn't match JAR name - treat as shaded dependency + results = [] + name = filename_info[:name] || File.basename(jar_path, '.jar') + results << build_result(type: 'jar', name: name, sources: sources, filepath: relative_path) + shaded_sources = { + "pom.properties[#{pom[:group_id]}:#{pom[:artifact_id]}]" => pom[:version] + } + results << build_result( + type: 'jar-shaded', + name: 
"#{pom[:group_id]}:#{pom[:artifact_id]}", + sources: shaded_sources, + filepath: relative_path + ) + results + end + else + name = filename_info[:name] || File.basename(jar_path, '.jar') + [build_result(type: 'jar', name: name, sources: sources, filepath: relative_path)] + end + end + + def parse_jar_filename(filename) + # Handle complex names + base = filename.sub(/\.jar$/, '') + parts = base.split('-') + + version_start_idx = nil + + parts.each_with_index do |part, idx| + next if idx == 0 + + if part =~ /^\d+\.\d+\.\d+/ + version_start_idx = idx + break + end + + if part =~ /^\d+\.\d+$/ + remaining = parts[(idx + 1)..] + if remaining.empty? || remaining.all? { |p| p =~ /^(\d+|jre|java|GA|Final|SNAPSHOT|RC\d*|alpha|beta|incubating)$/i } + version_start_idx = idx + break + end + end + end + + if version_start_idx && version_start_idx > 0 + name = parts[0...version_start_idx].join('-') + version = parts[version_start_idx..].join('-') + { name: name, version: version } + else + { name: base, version: nil } + end + end + + def parse_pom_properties(content) + props = {} + content.each_line do |line| + if line =~ /^(\w+)=(.+)$/ + props[$1.to_sym] = $2.strip + end + end + props + end + + def infer_version_from_gem_path(jar_path) + if jar_path =~ %r{/gems/([^/]+)-(\d+\.\d+[^/]*?)(?:-java)?/} + return $2 + end + nil + end + + def parse_manifest(content) + content = content.gsub(/\r?\n /, '') + + MANIFEST_VERSION_KEYS.each do |key| + if content =~ /^#{key}:\s*(.+)$/i + return $1.strip + end + end + nil + end + + # --- Gem extraction --- + + def extract_gems + results = [] + gemspec_files = Dir.glob(File.join(@root_dir, '**', '*.gemspec')) + + gemspec_files.each do |gemspec_path| + results << extract_gem_version(gemspec_path) + end + + results + end + + def extract_gem_version(gemspec_path) + sources = {} + filename = File.basename(gemspec_path) + + filename_info = parse_gemspec_filename(filename) + sources[:filename] = filename_info[:version] if filename_info[:version] + + 
dir_name = File.basename(File.dirname(gemspec_path)) + dir_info = parse_gem_dirname(dir_name) + sources[:dirname] = dir_info[:version] if dir_info[:version] + + begin + content = File.read(gemspec_path) + content_version = parse_gemspec_content(content) + sources[:gemspec_content] = content_version if content_version + rescue => e + sources[:error] = e.message + end + + name = filename_info[:name] || dir_info[:name] || filename.sub(/\.gemspec$/, '') + + build_result( + type: 'gem', + name: name, + sources: sources, + filepath: relative(gemspec_path) + ) + end + + def parse_gemspec_filename(filename) + if filename =~ /^(.+?)-(\d+\.\d+[^-]*?)(?:-[a-z]+)?\.gemspec$/ + { name: $1, version: $2 } + else + { name: filename.sub(/\.gemspec$/, ''), version: nil } + end + end + + def parse_gem_dirname(dirname) + if dirname =~ /^(.+?)-(\d+\.\d+[^-]*?)(?:-[a-z]+)?$/ + { name: $1, version: $2 } + else + { name: dirname, version: nil } + end + end + + def parse_gemspec_content(content) + patterns = [ + /\bs\.version\s*=\s*["']([^"']+)["']/, + /\bspec\.version\s*=\s*["']([^"']+)["']/, + /\.version\s*=\s*["']([^"']+)["']/, + /\bversion\s*=\s*["']([^"']+)["']\.freeze/, + /\bVERSION\s*=\s*["']([^"']+)["']/, + ] + + patterns.each do |pattern| + if content =~ pattern + version = $1 + return version if version =~ /^\d+\.\d+/ + end + end + + nil + end + + # --- Confidence & output --- + + def build_result(type:, name:, sources:, filepath:, group_id: nil, artifact_id: nil) + error = sources.delete(:error) + + raw_versions = sources.values.compact + normalized_versions = raw_versions.map { |v| normalize_version(v) }.compact.uniq + + confidence = if normalized_versions.empty? + 'none' + elsif normalized_versions.size == 1 + sources.size >= 2 ? 'high' : 'medium' + else + 'conflict' + end + + version = case confidence + when 'high', 'medium' + raw_versions.first + when 'conflict' + raw_versions.uniq.join(' vs ') + else + 'unknown' + end + + normalized = normalized_versions.size == 1 ? 
normalized_versions.first : nil
+
+    sources_str = sources.map { |k, v| "#{k}:#{v}" }.join(';')
+    sources_str += ";error:#{error}" if error
+
+    {
+      type: type,
+      name: name,
+      version: version,
+      normalized_version: normalized || version,
+      confidence: confidence,
+      sources: sources_str,
+      filepath: filepath,
+      group_id: group_id,
+      artifact_id: artifact_id
+    }
+  end
+
+  def relative(path)
+    path.sub(/^#{Regexp.escape(@root_dir)}\/?/, './')
+  end
+end
+
+# --- Main ---
+
+if ARGV.empty?
+  # artifact_dir is mandatory (ARGV[0] is required below) — say so in the usage
+  puts "Usage: #{$0} <artifact_dir> [output.csv]"
+  exit 1
+end
+
+root_dir = ARGV[0]
+output_file = ARGV[1] || 'output.csv'
+
+unless Dir.exist?(root_dir)
+  puts "Error: #{root_dir} is not a directory"
+  exit 1
+end
+
+extractor = VersionExtractor.new(root_dir)
+results = extractor.extract_all
+
+CSV.open(output_file, 'w') do |csv|
+  csv << %w[type name version normalized_version confidence sources filepath]
+  results.each do |r|
+    csv << [r[:type], r[:name], r[:version], r[:normalized_version], r[:confidence], r[:sources], r[:filepath]]
+  end
+end
+
+by_name = results.group_by { |r| [r[:type].sub('-shaded', ''), r[:name]] }
+duplicates = by_name.select { |_, entries| entries.map { |e| e[:normalized_version] }.uniq.size > 1 }
+
+duplicates_file = output_file.sub(/\.csv$/, '_duplicates.csv')
+CSV.open(duplicates_file, 'w') do |csv|
+  csv << %w[type name versions count locations]
+  duplicates.sort_by { |k, _| k }.each do |(type, name), entries|
+    versions = entries.map { |e| e[:normalized_version] }.uniq.sort.join('; ')
+    locations = entries.map { |e| "#{e[:normalized_version]}:#{e[:filepath]}" }.join('; ')
+    csv << [type, name, versions, entries.size, locations]
+  end
+end
+
+sbom_file = output_file.sub(/\.csv$/, '_sbom.json')
+generate_sbom(results, sbom_file)
diff --git a/.buildkite/scripts/snyk/artifact-scan/generate-steps.py b/.buildkite/scripts/snyk/artifact-scan/generate-steps.py
new file mode 100755
index 00000000000..b6d3de663f4
--- /dev/null
+++ 
b/.buildkite/scripts/snyk/artifact-scan/generate-steps.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 + +import sys +import requests +import yaml + +YAML_HEADER = '# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json\n' +VERSIONS_URL = "https://raw.githubusercontent.com/logstash-plugins/.ci/1.x/logstash-versions.yml" + + +def fetch_logstash_versions() -> dict: + try: + response = requests.get(VERSIONS_URL, timeout=30) + response.raise_for_status() + return yaml.safe_load(response.text) + except Exception as e: + print(f"Error: Failed to fetch logstash versions: {e}", file=sys.stderr) + sys.exit(1) + + +def generate_extraction_step(version: str, version_type: str) -> dict: + base_url = "https://snapshots.elastic.co/downloads/logstash" if version_type == 'snapshot' else "https://artifacts.elastic.co/downloads/logstash" + artifact_url = f"{base_url}/logstash-{version}-linux-aarch64.tar.gz" + step_key = f"extract-{version}".replace('.', '-') + + command = f"""#!/bin/bash +set -euo pipefail + +export SNYK_TOKEN=$(vault read -field=token secret/ci/elastic-logstash/snyk-creds) + +echo "--- Downloading Logstash {version}" +wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 5 \\ + -O logstash.tar.gz "{artifact_url}" || {{ + echo "Failed to download {version}, skipping..." + exit 0 +}} + +echo "--- Extracting tarball" +tar -xzf logstash.tar.gz +extracted_dir=$(tar -tzf logstash.tar.gz | head -1 | cut -f1 -d"/") + +echo "--- Running extraction via Gradle" +./gradlew extractArtifactVersions -PartifactDir="$PWD/${{extracted_dir}}" -PoutputFile="$PWD/.buildkite/scripts/snyk/artifact-scan/output.csv" + +echo "--- Downloading snyk..." 
+cd .buildkite/scripts/snyk/artifact-scan +curl -sL --retry-max-time 60 --retry 3 --retry-delay 5 https://static.snyk.io/cli/latest/snyk-linux -o snyk +chmod +x ./snyk + +echo "--- Running Snyk monitor for Logstash {version}" +# NOTE: we may need to check if our enterprise account allows SBOM yet... There may be some other options if not +./snyk monitor --experimental --file=output_sbom.json --org=logstash --target-reference={version} --project-name="logstash-artifact-{version}" + +echo "--- Uploading artifacts" +buildkite-agent artifact upload "output*.csv" +buildkite-agent artifact upload "output*.json" + +echo "--- Cleanup" +cd ../../../.. +rm -rf "${{extracted_dir}}" logstash.tar.gz +cd .buildkite/scripts/snyk/artifact-scan +rm -f snyk output*.csv output*.json +""" + + return { + "label": f":mag: {version}", + "key": step_key, + "command": command, + "artifact_paths": [ + ".buildkite/scripts/snyk/artifact-scan/output*.csv", + ".buildkite/scripts/snyk/artifact-scan/output*.json" + ] + } + + +def generate_pipeline() -> dict: + versions_data = fetch_logstash_versions() + steps = [] + + if 'releases' in versions_data: + for version in versions_data['releases'].values(): + steps.append(generate_extraction_step(version, 'release')) + + if 'snapshots' in versions_data: + for version in versions_data['snapshots'].values(): + steps.append(generate_extraction_step(version, 'snapshot')) + + return { + "agents": { + "provider": "gcp", + "imageProject": "elastic-images-prod", + "image": "family/platform-ingest-logstash-multi-jdk-ubuntu-2204", + "machineType": "n2-standard-2", + "diskSizeGb": 20 + }, + "steps": steps + } + + +if __name__ == "__main__": + pipeline = generate_pipeline() + print(YAML_HEADER + yaml.dump(pipeline, default_flow_style=False, sort_keys=False)) diff --git a/build.gradle b/build.gradle index c016e86b25a..1fbd940e181 100644 --- a/build.gradle +++ b/build.gradle @@ -1121,6 +1121,38 @@ clean { dependsOn deleteLocalJdk String jdkVersionFilename = 
tasks.findByName("extractBundledJdkVersion").outputFilename delete "${projectDir}/${jdkVersionFilename}" + delete fileTree(projectDir) { + include 'output*.csv' + include 'output*.json' + } + delete "${projectDir}/.buildkite/scripts/snyk/artifact-scan/Gemfile.lock" + delete fileTree("${projectDir}/.buildkite/scripts/snyk/artifact-scan") { + include 'output*.csv' + include 'output*.json' + } +} + +tasks.register("extractArtifactVersions") { + description = "Extract versions from a Logstash artifact (generates SBOM for Snyk scanning)" + dependsOn bootstrap + + doLast { + def artifactDir = project.findProperty('artifactDir') + def outputFile = project.findProperty('outputFile') ?: 'output.csv' + + if (!artifactDir) { + throw new GradleException("artifactDir property required. Usage: ./gradlew extractArtifactVersions -PartifactDir=/path/to/logstash -PoutputFile=output.csv") + } + + rake(projectDir, buildDir, 'extract:artifact_versions', artifactDir, outputFile) + } +} + +tasks.register("cleanExtraction") { + description = "Clean artifact extraction output files" + doLast { + rake(projectDir, buildDir, 'extract:clean') + } } if (System.getenv('OSS') != 'true') { diff --git a/catalog-info.yaml b/catalog-info.yaml index 10ad37c699a..36b73c7e18d 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -28,6 +28,7 @@ spec: - resource:buildkite-logstash-serverless-integration-testing - resource:logstash-snyk-report - resource:logstash-plugins-snyk-report + - resource:logstash-artifact-snyk-scan - resource:logstash-dra-snapshot-pipeline - resource:logstash-dra-staging-pipeline - resource:logstash-linux-jdk-matrix-pipeline @@ -171,6 +172,52 @@ spec: cronline: "@daily" message: "Run the Logstash Plugins Snyk report every day." 
+# *********************************** +# Declare artifacts-snyk-report pipeline +# *********************************** +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: logstash-artifact-snyk-scan + description: 'The logstash-artifacts-snyk-report pipeline.' +spec: + type: buildkite-pipeline + owner: group:logstash + system: platform-ingest + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: logstash-artifacts-snyk-report-ci + description: ':logstash: The logstash-artifacts-snyk-report :pipeline:' + spec: + repository: elastic/logstash + pipeline_file: ".buildkite/logstash_artifact_scan_pipeline.yml" + maximum_timeout_in_minutes: 60 + provider_settings: + trigger_mode: none # don't trigger jobs + env: + ELASTIC_SLACK_NOTIFICATIONS_ENABLED: 'true' + SLACK_NOTIFICATIONS_CHANNEL: '#logstash-build' + SLACK_NOTIFICATIONS_ON_SUCCESS: 'false' + SLACK_NOTIFICATIONS_SKIP_FOR_RETRIES: 'true' + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + logstash: + access_level: MANAGE_BUILD_AND_READ + ingest-eng-prod: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + schedules: + Daily Artifacts Snyk scan: + branch: main + cronline: "@daily" + message: "Run the Logstash Artifacts Snyk report every day." + # *********************************** # SECTION START: DRA pipelines # *********************************** diff --git a/rakelib/snyk_scan.rake b/rakelib/snyk_scan.rake new file mode 100644 index 00000000000..25a9af433ac --- /dev/null +++ b/rakelib/snyk_scan.rake @@ -0,0 +1,74 @@ +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. 
licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +namespace "extract" do + desc "Extract versions from a Logstash artifact directory" + task :artifact_versions, [:artifact_dir, :output_file] do |t, args| + require 'fileutils' + + artifact_dir = args[:artifact_dir] || raise("artifact_dir argument required") + output_file = args[:output_file] || "output.csv" + + # Resolve to absolute path before changing directories + artifact_dir = File.expand_path(artifact_dir) + output_file = File.expand_path(output_file) + + unless File.directory?(artifact_dir) + raise "Error: #{artifact_dir} is not a directory" + end + + script_dir = File.join(Dir.pwd, ".buildkite", "scripts", "snyk", "artifact-scan") + extract_script = File.join(script_dir, "extract_versions.rb") + + unless File.exist?(extract_script) + raise "Error: extraction script not found at #{extract_script}" + end + + # Install dependencies using the current Ruby (JRuby via Gradle) + puts "Installing dependencies..." + Dir.chdir(script_dir) do + require 'bundler' + Bundler.with_unbundled_env do + system('bundle', 'install', '--quiet') || raise("Failed to install dependencies") + end + end + + # Run the extraction script with the current Ruby (JRuby) + puts "Extracting versions from #{artifact_dir}..." 
+    Dir.chdir(script_dir) do
+      # Pass args to the script via ARGV, restoring the caller's ARGV afterwards
+      original_argv = ARGV.dup
+      ARGV.replace([artifact_dir, output_file])
+      begin; load extract_script; ensure; ARGV.replace(original_argv); end
+    end
+
+    sbom_file = output_file.sub(/\.csv$/, '_sbom.json')
+
+    puts "\nGenerated files:"
+    puts " - #{output_file}"
+    puts " - #{output_file.sub(/\.csv$/, '_duplicates.csv')}"
+    puts " - #{sbom_file}"
+    puts "\nReady for Snyk scanning with: snyk sbom test --experimental --file=#{sbom_file}"
+  end
+
+  desc "Clean artifact extraction output files"
+  task :clean do
+    FileUtils.rm_f(Dir.glob("output*.csv"))
+    FileUtils.rm_f(Dir.glob("output*.json"))
+    puts "Cleaned extraction output files"
+  end
+end