Hdf2ckl severity #2716

Closed
wants to merge 7 commits
Changes from 4 commits
56 changes: 56 additions & 0 deletions pack-inspecjs.bat
@@ -0,0 +1,56 @@
ECHO OFF

SET CYPRESS_INSTALL_BINARY=0
SET PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true

SET original_dir=%cd%
ECHO %original_dir%

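REM npm_config_heimdall and npm_config_branch are expected to be supplied by npm
REM (for example: npm run pack-inspecjs --heimdall=<path-to-heimdall2> --branch=<branch-name>);
REM when they are not set, the script falls back to ../heimdall2 and the master branch.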
IF DEFINED npm_config_heimdall (
CD %npm_config_heimdall%/libs/inspecjs/
) ELSE (
CD ../heimdall2/libs/inspecjs/
)

IF DEFINED npm_config_branch (
CALL git switch %npm_config_branch% || EXIT /B %ERRORLEVEL%
) ELSE (
CALL git switch master || EXIT /B %ERRORLEVEL%
)

ECHO Executing - git fetch ...
CALL git fetch || EXIT /B %ERRORLEVEL%

ECHO Executing - git pull ...
CALL git pull || EXIT /B %ERRORLEVEL%

ECHO Executing - yarn install ...
CALL yarn install || EXIT /B %ERRORLEVEL%

ECHO Executing - yarn pack ...
CALL yarn pack || EXIT /B %ERRORLEVEL%

ECHO Finished generating the tarball

CD %original_dir%

ECHO Executing - npm install remote ...
CALL npm i || EXIT /B %ERRORLEVEL%

ECHO Executing - npm install local ...

IF DEFINED npm_config_heimdall (
FOR /f "tokens=*" %%a IN ('dir /b %npm_config_heimdall%\libs\inspecjs\inspecjs-v*.tgz') DO (
SET THIS_TAR_ZIP=%npm_config_heimdall%\libs\inspecjs\%%a
)
) ELSE (
FOR /f "tokens=*" %%a IN ('dir /b ..\heimdall2\libs\inspecjs\inspecjs-v*.tgz') DO (
SET THIS_TAR_ZIP=..\heimdall2\libs\inspecjs\%%a
)
)
CALL npm i %THIS_TAR_ZIP% || EXIT /B %ERRORLEVEL%

ECHO Executing - npm run prepack ...
CALL npm run prepack || EXIT /B %ERRORLEVEL%

ECHO Install of local inspecjs complete.
40 changes: 40 additions & 0 deletions pack-inspecjs.sh
@@ -0,0 +1,40 @@
#!/bin/bash

set -o errexit   # abort on nonzero exit status
set -o nounset # abort on unbound variable
set -o pipefail # don't hide errors within pipes

ORIGINAL=$PWD
echo $ORIGINAL

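# npm_config_heimdall and npm_config_branch are expected to be supplied by npm
# (for example: npm run pack-inspecjs --heimdall=<path-to-heimdall2> --branch=<branch-name>);
# when they are not set, the script falls back to ../heimdall2 and the master branch.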
cd "${npm_config_heimdall:-../heimdall2}"
cd libs/inspecjs

git switch "${npm_config_branch:-master}"

echo "Executing - git fetch ..."
git fetch

echo "Executing - git pull ..."
git pull

echo "Executing - yarn install ..."
CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true yarn install

echo "Executing - yarn pack ..."
yarn pack

echo "Finished generating the tarball"

cd "$ORIGINAL"

echo "Executing - npm install remote ..."
npm i

echo "Executing - npm install local ..."
npm i "${npm_config_heimdall:-../heimdall2}/libs/inspecjs/inspecjs-v"*".tgz"

echo "Executing - npm run prepack ..."
npm run prepack

echo "Install of local inspecjs complete."
5 changes: 4 additions & 1 deletion package.json
@@ -197,7 +197,10 @@
"prepack:darwin:linux": "rm -rf lib && tsc",
"pack-hdf-converters": "run-script-os",
"pack-hdf-converters:win32": "pack-hdf-converters.bat",
"pack-hdf-converters:darwin:linux": "./pack-hdf-converters.sh"
"pack-hdf-converters:darwin:linux": "./pack-hdf-converters.sh",
"pack-inspecjs": "run-script-os",
"pack-inspecjs:win32": "pack-inspecjs.bat",
"pack-inspecjs:darwin:linux": "./pack-inspecjs.sh"
},
"types": "lib/index.d.ts",
"jest": {
70 changes: 21 additions & 49 deletions src/commands/convert/hdf2ckl.ts
@@ -1,13 +1,8 @@
import {Command, Flags} from '@oclif/core'
import {contextualizeEvaluation} from 'inspecjs'
import _ from 'lodash'
import fs from 'fs'
import {v4} from 'uuid'
import {default as files} from '../../resources/files.json'
import Mustache from 'mustache'
import {CKLMetadata} from '../../types/checklist'
import {convertFullPathToFilename, getProfileInfo} from '../../utils/global'
import {getDetails} from '../../utils/checklist'
import {ChecklistResults as Mapper} from '@mitre/hdf-converters'

export default class HDF2CKL extends Command {
static usage = 'convert hdf2ckl -i <hdf-scan-results-json> -o <output-ckl> [-h] [-m <metadata>] [-H <hostname>] [-F <fqdn>] [-M <mac-address>] [-I <ip-address>]'
@@ -25,54 +20,31 @@ export default class HDF2CKL extends Command {
ip: Flags.string({char: 'I', required: false, description: 'IP address for CKL metadata'}),
}

static examples = ['saf convert hdf2ckl -i rhel7-results.json -o rhel7.ckl --fqdn reverseproxy.example.org --hostname reverseproxy --ip 10.0.0.3 --mac 12:34:56:78:90']
static examples = ['saf convert hdf2ckl -i rhel7-results.json -o rhel7.ckl --fqdn reverseproxy.example.org --hostname reverseproxy --ip 10.0.0.3 --mac 12:34:56:78:90:AB']

async run() {
const {flags} = await this.parse(HDF2CKL)
const contextualizedEvaluation = contextualizeEvaluation(JSON.parse(fs.readFileSync(flags.input, 'utf8')))
const profileName = contextualizedEvaluation.data.profiles[0].name
const controls = contextualizedEvaluation.contains.flatMap(profile => profile.contains) || []
const rootControls = _.uniqBy(controls, control =>
_.get(control, 'root.hdf.wraps.id'),
).map(({root}) => root)
let cklData = {}
const cklMetadata: CKLMetadata = {
fileName: convertFullPathToFilename(flags.input),
benchmark: {
title: profileName || null,
version: '1',
plaintext: null,
},
stigid: profileName || null,
role: 'None',
type: 'Computing',
hostname: flags.hostname || _.get(contextualizedEvaluation, 'evaluation.data.passthrough.hostname') || null,
ip: flags.ip || _.get(contextualizedEvaluation, 'evaluation.data.passthrough.ip') || null,
mac: flags.mac || _.get(contextualizedEvaluation, 'evaluation.data.passthrough.mac') || null,
fqdn: flags.fqdn || _.get(contextualizedEvaluation, 'evaluation.data.passthrough.fqdn') || null,
tech_area: null,
target_key: '0',
web_or_database: 'false',
web_db_site: null,
web_db_instance: null,
}

if (flags.metadata) {
const cklMetadataInput: CKLMetadata = JSON.parse(fs.readFileSync(flags.metadata, 'utf8'))
for (const field in cklMetadataInput) {
if (typeof cklMetadata[field] === 'string' || typeof cklMetadata[field] === 'object') {
cklMetadata[field] = cklMetadataInput[field]
}
}
}
/* Order of precedence for checklist metadata:
command flags (hostname, ip, etc.)
metadata flag
input hdf file passthrough.metadata
input hdf file passthrough.checklist.asset */

cklData = {
releaseInfo: cklMetadata.benchmark.plaintext,
...cklMetadata,
profileInfo: getProfileInfo(contextualizedEvaluation, cklMetadata.fileName),
uuid: v4(),
controls: rootControls.map(control => getDetails(control, profileName)),
const defaultMetadata: CKLMetadata = {
role: 'None', assettype: 'Computing', targetkey: '0', webordatabase: false, profiles: [],
hostfqdn: '', hostip: '', hostmac: '', hostguid: '', marking: '', techarea: '',
hostname: '', stigguid: '', targetcomment: '', webdbinstance: '', webdbsite: '',
}
fs.writeFileSync(flags.output, Mustache.render(files['cklExport.ckl'].data, cklData).replaceAll(/[^\x00-\x7F]/g, ''))
const inputHDF = JSON.parse(fs.readFileSync(flags.input, 'utf8'))
const flagMetadata = {hostname: flags.hostname, hostip: flags.ip, hostmac: flags.mac, hostfqdn: flags.fqdn}
const fileMetadata = flags.metadata ? JSON.parse(fs.readFileSync(flags.metadata, 'utf8')) : {}
const hdfMetadata = _.get(inputHDF, 'passthrough.metadata', _.get(inputHDF, 'passthrough.checklist.asset', {}))
const metadata = _.merge(defaultMetadata, hdfMetadata, fileMetadata, flagMetadata)

metadata.profiles = flags.metadata ? _.get(fileMetadata, 'profiles', []) : _.get(hdfMetadata, 'profiles', [])
_.set(inputHDF, 'passthrough.metadata', metadata)

fs.writeFileSync(flags.output, new Mapper(inputHDF).toCkl())
}
}
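Reviewer note: a minimal sketch of how the lodash merge resolves the precedence comment above (not part of this PR; field names and sample values are illustrative). Because _.merge skips source properties that resolve to undefined, an unset command flag does not clobber a value supplied by the metadata file or the HDF passthrough:

import _ from 'lodash'

// Later sources win, so precedence from lowest to highest is:
// defaults < HDF passthrough metadata < -m metadata file < command-line flags.
const defaults = {hostname: '', hostip: '', role: 'None'}
const hdfPassthrough = {hostname: 'host-from-hdf', hostip: '10.0.0.1'}
const metadataFile = {hostname: 'host-from-metadata-file'}
const cliFlags = {hostname: 'host-from-flag', hostip: undefined}

const merged = _.merge({}, defaults, hdfPassthrough, metadataFile, cliFlags)
// merged.hostname === 'host-from-flag'
// merged.hostip   === '10.0.0.1'   (undefined flag value is skipped)
// merged.role     === 'None'       (default survives untouched)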
36 changes: 17 additions & 19 deletions src/types/checklist.d.ts
@@ -1,3 +1,4 @@
import {StigMetadata} from '@mitre/hdf-converters'
import {ContextualizedEvaluation} from 'inspecjs'

export interface ChecklistControl {
@@ -20,25 +21,22 @@
}

export interface CKLMetadata {
fileName: string;
benchmark: {
title: string | null;
version: string | null;
plaintext: string | null;
};
stigid: string | null;
role: string | null;
type: string | null;
hostname: string | null;
ip: string | null;
mac: string | null;
fqdn: string | null;
tech_area: string | null;
target_key: string | null;
web_or_database: string | null;
web_db_site: string | null;
web_db_instance: string | null;
[key: string]: string | null | Record<string, string | null>;
assettype: null | string;
hostfqdn: null | string;
hostguid: null | string;
hostip: null | string;
hostmac: null | string;
hostname: null | string;
marking: null | string;
role: null | string;
stigguid: null | string;
targetcomment: null | string;
targetkey: null | string;
techarea: null | string;
webdbinstance: null | string;
webdbsite: null | string;
webordatabase: null | boolean;
profiles: StigMetadata[];
}

type ExtendedEvaluationFile = {
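Reviewer note: as a point of reference for the -m flag handled in hdf2ckl.ts, a metadata file would provide some subset of the fields above. A minimal hypothetical example, typed against this interface (import path and values are illustrative only):

import type {CKLMetadata} from '../types/checklist'

// Hypothetical contents of a file passed via `-m metadata.json`; any field left
// out falls back to the converter defaults or to the HDF passthrough data.
const exampleMetadata: Partial<CKLMetadata> = {
  hostname: 'reverseproxy',
  hostfqdn: 'reverseproxy.example.org',
  hostip: '10.0.0.3',
  hostmac: '12:34:56:78:90:AB',
  role: 'None',
  assettype: 'Computing',
  webordatabase: false,
}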
16 changes: 15 additions & 1 deletion src/utils/threshold.ts
@@ -69,7 +69,21 @@ export function extractStatusCounts(profile: ContextualizedProfile, severity?: s
for (const c of profile.contains.filter(control => control.extendedBy.length === 0)) {
const control = c.root
const status: ControlStatus = control.hdf.status
const controlSeverity: Severity = control.hdf.severity
const impact = control.data.impact

// Derive severity from the control's impact rather than control.hdf.severity;
// impact is more reflective of how important these controls actually are.
let controlSeverity: Severity
if (impact < 0.1)
controlSeverity = 'none'
else if (impact < 0.4)
controlSeverity = 'low'
else if (impact < 0.7)
controlSeverity = 'medium'
else if (impact < 0.9)
controlSeverity = 'high'
else
controlSeverity = 'critical'

if (!severity || (controlSeverity === severity)) {
++hash[status]
if (status === 'Passed') {
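Reviewer note: summarized as a standalone helper, the cutoffs introduced above map impact to severity as follows (a sketch for illustration only; the local Severity alias stands in for the inspecjs type):

type Severity = 'none' | 'low' | 'medium' | 'high' | 'critical'

// Mirrors the thresholds in extractStatusCounts:
// [0, 0.1) -> none, [0.1, 0.4) -> low, [0.4, 0.7) -> medium, [0.7, 0.9) -> high, [0.9, 1] -> critical.
function severityFromImpact(impact: number): Severity {
  if (impact < 0.1) return 'none'
  if (impact < 0.4) return 'low'
  if (impact < 0.7) return 'medium'
  if (impact < 0.9) return 'high'
  return 'critical'
}

// Boundary values land in the higher bucket: severityFromImpact(0.7) === 'high'.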
13 changes: 13 additions & 0 deletions test/commands/convert/ckl2hdf.test.ts
@@ -42,3 +42,16 @@ describe('Test ckl2hdf Three Stig Checklist example', () => {
expect(omitHDFChangingFields(test)).to.eql(omitHDFChangingFields(sample))
})
})

describe('Test ckl2hdf Small Checklist Overrides examples', () => {
const tmpobj = tmp.dirSync({unsafeCleanup: true})

test
.stdout()
.command(['convert ckl2hdf', '-i', path.resolve('./test/sample_data/checklist/sample_input_report/small_ckl_overrides.ckl'), '-o', `${tmpobj.name}/smallchecklistoverrides.json`])
.it('hdf-converter output test', () => {
const test = JSON.parse(fs.readFileSync(`${tmpobj.name}/smallchecklistoverrides.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/checklist/small_overrides_hdf.json'), 'utf8'))
expect(omitHDFChangingFields(test)).to.eql(omitHDFChangingFields(sample))
})
})
21 changes: 15 additions & 6 deletions test/commands/convert/hdf2ckl.test.ts
@@ -9,28 +9,37 @@ describe('Test hdf2checklist', () => {

test
.stdout()
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), '-o', `${tmpobj.name}/hdf2ckl_test.json`])
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), '-o', `${tmpobj.name}/hdf2ckl_test.ckl`])
.it('hdf-converter output test - defaults', () => {
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_test.json`, 'utf8')
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_test.ckl`, 'utf8')
const sample = fs.readFileSync(path.resolve('./test/sample_data/checklist/red_hat_good.ckl'), 'utf8')
expect(omitChecklistChangingFields(test)).to.eql(omitChecklistChangingFields(sample))
})

test
.stdout()
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/vSphere8_report.json'), '-o', `${tmpobj.name}/hdf2ckl_test.json`])
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/vSphere8_report.json'), '-o', `${tmpobj.name}/hdf2ckl_test.ckl`])
.it('hdf-converter output test - inspec results from profile with dependent profiles', () => {
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_test.json`, 'utf8')
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_test.ckl`, 'utf8')
const sample = fs.readFileSync(path.resolve('./test/sample_data/checklist/vSphere8_report.ckl'), 'utf8')
expect(omitChecklistChangingFields(test)).to.eql(omitChecklistChangingFields(sample))
})

test
.stdout()
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), '-o', `${tmpobj.name}/hdf2ckl_metadata_test.json`, '-m', path.resolve('./test/sample_data/checklist/metadata.json')])
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), '-o', `${tmpobj.name}/hdf2ckl_metadata_test.ckl`, '-m', path.resolve('./test/sample_data/checklist/metadata.json')])
.it('hdf-converter output test - with metadata', () => {
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_metadata_test.json`, 'utf8')
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_metadata_test.ckl`, 'utf8')
const sample = fs.readFileSync(path.resolve('./test/sample_data/checklist/red_hat_good_metadata.ckl'), 'utf8')
expect(omitChecklistChangingFields(test)).to.eql(omitChecklistChangingFields(sample))
})

test
.stdout()
.command(['convert hdf2ckl', '-i', path.resolve('./test/sample_data/HDF/input/RHEL7_overrides_hdf.json'), '-o', `${tmpobj.name}/hdf2ckl_overrides_test.ckl`])
.it('hdf-converter output test - with severity overrides', () => {
const test = fs.readFileSync(`${tmpobj.name}/hdf2ckl_overrides_test.ckl`, 'utf8')
const sample = fs.readFileSync(path.resolve('./test/sample_data/checklist/converted-rhel7_overrides.ckl'), 'utf8')
expect(omitChecklistChangingFields(test)).to.eql(omitChecklistChangingFields(sample))
})
})