Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: merge control markdown with json (#1528) #1740

Open
wants to merge 7 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,9 @@ def tmp_empty_cwd(tmp_path: pathlib.Path) -> Iterator[pathlib.Path]:
@pytest.fixture(scope='function')
def testdata_dir() -> pathlib.Path:
    """Return the absolute path to the test data directory.

    The path is anchored to this file's location rather than the current
    working directory, so the fixture works no matter where pytest is
    invoked from.
    """
    test_dir = pathlib.Path(__file__).parent.resolve()
    # test_dir / 'data' is already a Path; no re-wrapping needed.
    return (test_dir / 'data').resolve()


Expand Down
17 changes: 9 additions & 8 deletions tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,26 +53,27 @@

logger = logging.getLogger(__name__)

# All test-data paths are anchored to this file's directory so the suite is
# independent of the current working directory at invocation time.
TEST_DIR = pathlib.Path(__file__).parent.resolve()
BASE_TMP_DIR = (TEST_DIR / '__tmp_path').resolve()
YAML_TEST_DATA_PATH = (TEST_DIR / 'data/yaml').resolve()
JSON_TEST_DATA_PATH = (TEST_DIR / 'data/json').resolve()
ENV_TEST_DATA_PATH = (TEST_DIR / 'data/env').resolve()
# NIST content lives in a sibling checkout one level above the tests package.
JSON_NIST_DATA_PATH = (TEST_DIR / '../nist-content/nist.gov/SP800-53/rev5/json').resolve()
JSON_NIST_CATALOG_NAME = 'NIST_SP-800-53_rev5_catalog.json'
JSON_NIST_PROFILE_NAME = 'NIST_SP-800-53_rev5_MODERATE-baseline_profile.json'
JSON_NIST_REV_4_DATA_PATH = (TEST_DIR / '../nist-content/nist.gov/SP800-53/rev4/json').resolve()
JSON_NIST_REV_4_CATALOG_NAME = 'NIST_SP-800-53_rev4_catalog.json'
JSON_NIST_REV_5_CATALOG_NAME = 'nist-rev5-catalog-full.json'
JSON_NIST_REV_4_PROFILE_NAME = 'NIST_SP-800-53_rev4_MODERATE-baseline_profile.json'
SIMPLIFIED_NIST_CATALOG_NAME = 'simplified_nist_catalog.json'
SIMPLIFIED_NIST_PROFILE_NAME = 'simplified_nist_profile.json'
TASK_XLSX_OUTPUT_PATH = (TEST_DIR / 'data/tasks/xlsx/output').resolve()

CATALOGS_DIR = 'catalogs'
PROFILES_DIR = 'profiles'
COMPONENT_DEF_DIR = 'component-definitions'

NIST_EXAMPLES = TEST_DIR / '../nist-content/examples'
NIST_SAMPLE_CD_JSON = NIST_EXAMPLES / 'component-definition' / 'json' / 'example-component.json'

NEW_MODEL_AGE_SECONDS = 100
Expand Down
27 changes: 20 additions & 7 deletions tests/trestle/core/commands/validate_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,6 @@
from trestle.oscal.common import ResponsibleParty, Role
from trestle.oscal.component import ComponentDefinition, ControlImplementation

test_data_dir = pathlib.Path('tests/data').resolve()

md_path = 'md_comp'


Expand All @@ -58,8 +56,11 @@
('my_test_model', '-t', False), ('my_test_model', '-a', False), ('my_test_model', '-x', False)
]
)
def test_validation_happy(name, mode, parent, tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch) -> None:
def test_validation_happy(
name, mode, parent, tmp_trestle_dir: pathlib.Path, testdata_dir: pathlib.Path, monkeypatch: MonkeyPatch
) -> None:
"""Test successful validation runs."""
test_data_dir = testdata_dir
(tmp_trestle_dir / test_utils.CATALOGS_DIR / 'my_test_model').mkdir(exist_ok=True, parents=True)
(tmp_trestle_dir / test_utils.CATALOGS_DIR / 'my_test_model2').mkdir(exist_ok=True, parents=True)
shutil.copyfile(
Expand Down Expand Up @@ -101,9 +102,10 @@ def test_validation_happy(name, mode, parent, tmp_trestle_dir: pathlib.Path, mon
]
)
def test_validation_unhappy(
name, mode, parent, status, tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch
name, mode, parent, status, tmp_trestle_dir: pathlib.Path, testdata_dir: pathlib.Path, monkeypatch: MonkeyPatch
) -> None:
"""Test failure modes of validation."""
test_data_dir = testdata_dir
(tmp_trestle_dir / test_utils.CATALOGS_DIR / 'my_test_model').mkdir(exist_ok=True, parents=True)
(tmp_trestle_dir / test_utils.CATALOGS_DIR / 'my_test_model2').mkdir(exist_ok=True, parents=True)
shutil.copyfile(
Expand Down Expand Up @@ -421,8 +423,12 @@ def test_period(tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch) -> None
pass


def test_validate_component_definition(tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch) -> None:
def test_validate_component_definition(
tmp_trestle_dir: pathlib.Path, testdata_dir: pathlib.Path, monkeypatch: MonkeyPatch
) -> None:
"""Test validation of Component Definition."""
test_data_dir = testdata_dir

jfile = 'component-definition.json'

sdir = test_data_dir / 'validate' / 'component-definitions' / 'x1'
Expand All @@ -438,8 +444,11 @@ def test_validate_component_definition(tmp_trestle_dir: pathlib.Path, monkeypatc
test_utils.execute_command_and_assert(validate_command, 0, monkeypatch)


def test_validate_component_definition_ports(tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch) -> None:
def test_validate_component_definition_ports(
tmp_trestle_dir: pathlib.Path, testdata_dir: pathlib.Path, monkeypatch: MonkeyPatch
) -> None:
"""Test validation of ports in Component Definition."""
test_data_dir = testdata_dir
jfile = 'component-definition.json'

sdir = test_data_dir / 'validate' / 'component-definitions' / 'x2'
Expand All @@ -455,8 +464,12 @@ def test_validate_component_definition_ports(tmp_trestle_dir: pathlib.Path, monk
test_utils.execute_command_and_assert(validate_command, 0, monkeypatch)


def test_validate_component_definition_ports_invalid(tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch) -> None:
def test_validate_component_definition_ports_invalid(
tmp_trestle_dir: pathlib.Path, testdata_dir: pathlib.Path, monkeypatch: MonkeyPatch
) -> None:
"""Test validation of ports in Component Definition."""
test_data_dir = testdata_dir

jfile = 'component-definition.json'

sdir = test_data_dir / 'validate' / 'component-definitions' / 'x3'
Expand Down
6 changes: 4 additions & 2 deletions tests/trestle/core/control_io_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,10 +338,12 @@ def test_get_control_param_dict(tmp_trestle_dir: pathlib.Path) -> None:


@pytest.mark.parametrize('overwrite_header_values', [True, False])
def test_write_control_header_params(overwrite_header_values, tmp_path: pathlib.Path) -> None:
def test_write_control_header_params(
overwrite_header_values, tmp_path: pathlib.Path, testdata_dir: pathlib.Path
) -> None:
"""Test write/read of control header params."""
# orig file just has one param ac-1_prm_3
src_control_path = pathlib.Path('tests/data/author/controls/control_with_components_and_params.md')
src_control_path = pathlib.Path(testdata_dir / 'author/controls/control_with_components_and_params.md')
# header has two params - 3 and 4
header = {
const.SET_PARAMS_TAG: {
Expand Down
2 changes: 1 addition & 1 deletion trestle/core/catalog/catalog_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -904,5 +904,5 @@ def generate_control_rule_info(self, part_id_map: Dict[str, Dict[str, str]], con
if len(dup_comp_uuids) > 0:
# throw an exception if there are repeated component uuids
for comp_uuid in dup_comp_uuids:
logger.error(f'Component uuid { comp_uuid } is duplicated')
logger.error(f'Component uuid {comp_uuid} is duplicated')
raise TrestleError('Component uuids cannot be duplicated between different component definitions')
2 changes: 2 additions & 0 deletions trestle/core/control_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -591,6 +591,8 @@ def merge_dicts_deep(
New items are always added from src to dest.
Items present in both will be overridden in dest if overwrite_header_values is True.
"""
if src is None:
return
for key in src.keys():
if key in dest:
if depth and level == depth:
Expand Down
4 changes: 3 additions & 1 deletion trestle/core/control_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -516,8 +516,10 @@ def write_control_for_editing(
control_file = dest_path / (control.id + const.MARKDOWN_FILE_EXT)
# read the existing markdown header and content if it exists
md_header, comp_dict = ControlReader.read_control_info_from_md(control_file, context)
# replace the memory comp_dict with the md one if control exists
# Merge the memory comp_dict with the md one if control exists
if comp_dict:
template_comp_dict = context.comp_dict
ControlInterface.merge_dicts_deep(comp_dict, template_comp_dict, False)
context.comp_dict = comp_dict

header_comment_dict = {const.TRESTLE_ADD_PROPS_TAG: const.YAML_PROPS_COMMENT}
Expand Down
4 changes: 2 additions & 2 deletions trestle/transforms/implementations/tanium.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def transform(self, blob: str) -> Results:
results.__root__ = tanium_oscal_factory.results
ts1 = datetime.datetime.now()
self._analysis = tanium_oscal_factory.analysis
self._analysis.append(f'transform time: {ts1-ts0}')
self._analysis.append(f'transform time: {ts1 - ts0}')
return results


Expand Down Expand Up @@ -455,7 +455,7 @@ def _batch_observations(self, index: int) -> Dict[str, List[Observation]]:
start = index * batch_size
end = (index + 1) * batch_size
end = min(end, len(self._rule_use_list))
logger.debug(f'start: {start} end: {end-1}')
logger.debug(f'start: {start} end: {end - 1}')
# process just the one chunk
for i in range(start, end):
rule_use = self._rule_use_list[i]
Expand Down
Loading