diff --git a/MANIFEST.in b/MANIFEST.in index 13ce5eb0..6f84fb27 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -10,3 +10,4 @@ recursive-include whatrecord/grammar *.lark recursive-include whatrecord/tests/epicsarch/ *.txt recursive-include whatrecord/tests *.db *.cmd *.dbd *.proto *.pvlist *.acf t1-*.txt *.st LICENSE recursive-include whatrecord/tests *.substitutions *.substitutions.expanded +recursive-include whatrecord/tests Makefile diff --git a/README.rst b/README.rst index 127f8909..70c22288 100644 --- a/README.rst +++ b/README.rst @@ -63,11 +63,18 @@ Plugins * LCLS-specific epicsArch / logbook DAQ PVs * TwinCAT PLC source code (pytmc) +Makefile / build system information + +* Determine build dependencies from a ``Makefile`` +* Recursively inspect sub-dependencies +* Graph IOC dependency information or output it as JSON + Command-line tools * ``whatrecord lint`` - lint a database * ``whatrecord parse`` - parse supported formats * ``whatrecord server`` - start the API server +* ``whatrecord graph`` - graph PV relationships, SNL diagrams, IOC dependencies Record? 
------- diff --git a/docs/docs/index.md b/docs/docs/index.md index 18078fa7..9c05d959 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -56,11 +56,18 @@ It contains interfaces for: * LCLS-specific epicsArch / logbook DAQ PVs * TwinCAT PLC source code (pytmc) +### Makefile / build system information + +* Determine build dependencies from a ``Makefile`` +* Recursively inspect sub-dependencies +* Graph IOC dependency information or output it as JSON + ### Command-line tools * ``whatrecord lint`` - lint a database * ``whatrecord parse`` - parse supported formats to JSON * ``whatrecord server`` - start the API server +* ``whatrecord graph`` - graph PV relationships, SNL diagrams, IOC dependencies * Plugins can similarly be executed to provide parsed information in JSON ## Installing diff --git a/whatrecord/__init__.py b/whatrecord/__init__.py index 9447ca92..63cb477e 100644 --- a/whatrecord/__init__.py +++ b/whatrecord/__init__.py @@ -3,12 +3,14 @@ __version__ = _version.get_versions()['version'] from .access_security import AccessSecurityConfig +from .common import FileFormat from .db import Database from .dbtemplate import TemplateSubstitution from .gateway import GatewayConfig from .gateway import PVList as GatewayPVList from .iocsh import parse_iocsh_line from .macro import MacroContext +from .parse import parse from .plugins.epicsarch import LclsEpicsArchFile from .snl import SequencerProgram from .streamdevice import StreamProtocol @@ -16,6 +18,7 @@ __all__ = [ "AccessSecurityConfig", "Database", + "FileFormat", "GatewayConfig", "GatewayPVList", "LclsEpicsArchFile", @@ -24,4 +27,5 @@ "StreamProtocol", "TemplateSubstitution", "parse_iocsh_line", + "parse", ] diff --git a/whatrecord/bin/deps.py b/whatrecord/bin/deps.py new file mode 100644 index 00000000..0795adf3 --- /dev/null +++ b/whatrecord/bin/deps.py @@ -0,0 +1,102 @@ +""" +"whatrecord deps" is used to get dependency information from EPICS IOC or +module Makefiles. 
+ +Under the hood, this uses GNU make, which is an external dependency required +for correct functionality. +""" + +import argparse +import json +import logging +import sys +from typing import Optional + +import apischema + +from ..common import AnyPath +from ..format import FormatContext +from ..makefile import DependencyGroup, DependencyGroupGraph, Makefile +from .graph import render_graph_to_file + +logger = logging.getLogger(__name__) +DESCRIPTION = __doc__ + + +def build_arg_parser(parser=None): + if parser is None: + parser = argparse.ArgumentParser() + + parser.description = DESCRIPTION + parser.formatter_class = argparse.RawTextHelpFormatter + + parser.add_argument( + "path", type=str, help="Path to IOC or Makefile itself" + ) + + parser.add_argument( + "--keep-os-env", + action="store_true", + help=( + "Keep environment variables present outside of ``make`` in the " + ".env dictionaries" + ) + ) + + parser.add_argument( + "--no-recurse", + action="store_true", + help="Do not recurse into dependencies", + ) + + parser.add_argument( + "--friendly", + dest="friendly", + action="store_true", + help="Output user-friendly text instead of JSON", + ) + + parser.add_argument( + "--graph", + action="store_true", + help="Output a graph of dependencies", + ) + + parser.add_argument( + "-o", "--graph-output", + type=str, + required=False, + help="Output file to write to. 
Defaults to standard output.", + ) + + return parser + + +def main( + path: AnyPath, + friendly: bool = False, + no_recurse: bool = False, + keep_os_env: bool = False, + graph: bool = False, + graph_output: Optional[str] = None, + file=sys.stdout, +): + makefile_path = Makefile.find_makefile(path) + makefile = Makefile.from_file(makefile_path, keep_os_env=keep_os_env) + info = DependencyGroup.from_makefile( + makefile, recurse=not no_recurse, keep_os_env=keep_os_env + ) + + if graph: + group_graph = DependencyGroupGraph(info) + render_graph_to_file(group_graph.to_digraph(), filename=graph_output) + # An alternative to 'whatrecord graph'; both should have the same + # result in the end. + return + + if not friendly: + json_info = apischema.serialize(info) + print(json.dumps(json_info, indent=4)) + else: + fmt = FormatContext() + print(fmt.render_object(info, "console"), file=file) diff --git a/whatrecord/bin/graph.py b/whatrecord/bin/graph.py index b4b10249..269f5782 100644 --- a/whatrecord/bin/graph.py +++ b/whatrecord/bin/graph.py @@ -18,6 +18,7 @@ from ..common import AnyPath from ..db import Database, LinterResults from ..graph import RecordLinkGraph, build_database_relations, graph_links +from ..makefile import DependencyGroup, DependencyGroupGraph, Makefile from ..shell import LoadedIoc from ..snl import SequencerProgram from .parse import parse_from_cli_args @@ -217,6 +218,8 @@ def main( if isinstance(item, SequencerProgram): graph = item.as_graph(include_code=code) + elif isinstance(item, Makefile): + graph = DependencyGroupGraph(DependencyGroup.from_makefile(item)) else: raise RuntimeError( f"Sorry, graph isn't supported yet for {item.__class__.__name__}" diff --git a/whatrecord/bin/main.py b/whatrecord/bin/main.py index 2b68ada5..e1d0d191 100644 --- a/whatrecord/bin/main.py +++ b/whatrecord/bin/main.py @@ -16,7 +16,18 @@ DESCRIPTION = __doc__ RETURN_VALUE = None -MODULES = ("server", "iocmanager_loader", "info", "lint", "parse", "graph") +# The 
whatrecord.bin modules are listed here and imported dynamically such +# that any missing dependencies for that command will not stop other +# entrypoints from working: +MODULES = ( + "deps", + "graph", + "info", + "iocmanager_loader", + "lint", + "parse", + "server", +) def _try_import(module): diff --git a/whatrecord/bin/parse.py b/whatrecord/bin/parse.py index 84990a09..a5a97329 100644 --- a/whatrecord/bin/parse.py +++ b/whatrecord/bin/parse.py @@ -5,24 +5,16 @@ """ import argparse -import asyncio import json import logging -import pathlib -from typing import Dict, List, Optional, Union +from typing import List, Optional import apischema -from ..access_security import AccessSecurityConfig from ..common import AnyPath, FileFormat, IocMetadata -from ..db import Database, LinterResults -from ..dbtemplate import TemplateSubstitution from ..format import FormatContext -from ..gateway import PVList as GatewayPVList -from ..macro import MacroContext +from ..parse import ParseResult, parse from ..shell import LoadedIoc -from ..snl import SequencerProgram -from ..streamdevice import StreamProtocol logger = logging.getLogger(__name__) DESCRIPTION = __doc__ @@ -104,132 +96,6 @@ def build_arg_parser(parser=None): return parser -ParseResult = Union[ - AccessSecurityConfig, - Database, - GatewayPVList, - LinterResults, - LoadedIoc, - SequencerProgram, - StreamProtocol, - TemplateSubstitution, -] - - -def parse( - filename: AnyPath, - dbd: Optional[str] = None, - standin_directories: Optional[Dict[str, str]] = None, - macros: Optional[str] = None, - use_gdb: bool = False, - format: Optional[FileFormat] = None, - expand: bool = False, - v3: bool = False, -) -> ParseResult: - """ - Generically parse either a startup script or a database file. - - Hopefully does the right thing based on file extension. If not, use - the ``format`` kwarg to specify it directly. - - Parameters - ---------- - filename : str or pathlib.Path - The filename to parse. 
- - dbd : str or pathlib.Path, optional - The associated database definition file, if parsing a database or - substitutions file. - - standin_directories : dict, optional - Rewrite hard-coded directory prefixes by setting:: - - standin_directories = {"/replace_this/": "/with/this"} - - macros : str, optional - Macro string to use when parsing the file. - - expand : bool, optional - Expand a substitutions file. - - v3 : bool, optional - Use V3 database grammar where applicable. - """ - standin_directories = standin_directories or {} - - filename = pathlib.Path(filename) - - # The shared macro context - used in different ways below: - macro_context = MacroContext(macro_string=macros or "") - - if format is None: - format = FileFormat.from_filename(filename) - - if format in (FileFormat.database, FileFormat.database_definition): - if format == FileFormat.database_definition or not dbd: - return Database.from_file( - filename, macro_context=macro_context, version=3 if v3 else 4 - ) - return LinterResults.from_database_file( - db_filename=filename, - dbd=Database.from_file(dbd, version=3 if v3 else 4), - macro_context=macro_context - ) - - if format == FileFormat.iocsh: - md = IocMetadata.from_filename( - filename, - standin_directories=standin_directories, - macros=dict(macro_context), - ) - if use_gdb: - try: - asyncio.run(md.get_binary_information()) - except KeyboardInterrupt: - logger.info("Skipping gdb information...") - - return LoadedIoc.from_metadata(md) - - if format == FileFormat.substitution: - template = TemplateSubstitution.from_file(filename) - if not expand: - return template - - database_text = template.expand_files() - # It's technically possible that this *isn't* a database file; so - # perhaps a `whatrecord msi` could be implemented in the future. 
- return Database.from_string( - database_text, - macro_context=macro_context, - dbd=Database.from_file(dbd) if dbd is not None else None, - filename=filename, - version=3 if v3 else 4, - ) - - if format == FileFormat.state_notation: - return SequencerProgram.from_file(filename) - - with open(filename, "rt") as fp: - contents = fp.read() - - if macros: - contents = macro_context.expand_file(contents) - - if format == FileFormat.gateway_pvlist: - return GatewayPVList.from_string(contents, filename=filename) - - if format == FileFormat.access_security: - return AccessSecurityConfig.from_string(contents, filename=filename) - - if format == FileFormat.stream_protocol: - return StreamProtocol.from_string(contents, filename=filename) - - raise RuntimeError( - f"Sorry, whatrecord doesn't support the {format!r} format just yet in the " - f"CLI parsing tool. Please open an issue." - ) - - def parse_from_cli_args( filename: AnyPath, dbd: Optional[str] = None, diff --git a/whatrecord/common.py b/whatrecord/common.py index b7b97a84..a5df93fb 100644 --- a/whatrecord/common.py +++ b/whatrecord/common.py @@ -36,6 +36,7 @@ class FileFormat(str, enum.Enum): access_security = 'access_security' stream_protocol = 'stream_protocol' state_notation = 'state_notation' + makefile = 'makefile' @classmethod def from_extension(cls, extension: str) -> FileFormat: @@ -55,7 +56,11 @@ def from_extension(cls, extension: str) -> FileFormat: @classmethod def from_filename(cls, filename: AnyPath) -> FileFormat: """Get a file format based on a full filename.""" - extension = pathlib.Path(filename).suffix.lstrip(".") + path = pathlib.Path(filename) + extension = path.suffix.lstrip(".") + if not extension and path.name.startswith("Makefile"): + return FileFormat.makefile + try: return FileFormat.from_extension(extension) except KeyError: @@ -440,7 +445,7 @@ def empty(cls): startup_directory=pathlib.Path()) @classmethod - def from_filename( + def from_file( cls, filename: Union[pathlib.Path, str], *, @@ 
-476,6 +481,9 @@ def from_filename( base_version=base_version, ) + #: Back-compat: from_filename is deprecated + from_filename = from_file + @classmethod def from_dict(cls, iocdict: IocInfoDict, macros: Optional[Dict[str, str]] = None): """ diff --git a/whatrecord/graph.py b/whatrecord/graph.py index f354fb51..b1508c64 100644 --- a/whatrecord/graph.py +++ b/whatrecord/graph.py @@ -89,6 +89,7 @@ def add_edge( self, source: str, destination: str, + allow_dupes: bool = False, **options ) -> GraphEdge: """Create a new edge in the graph.""" @@ -97,6 +98,8 @@ def add_edge( self.get_node(destination), options ) + if edge in self.edges and not allow_dupes: + return self.edges[self.edges.index(edge)] self.edges.append(edge) return edge diff --git a/whatrecord/ioc_finder.py b/whatrecord/ioc_finder.py index 15c40a90..4185e8c6 100644 --- a/whatrecord/ioc_finder.py +++ b/whatrecord/ioc_finder.py @@ -47,4 +47,4 @@ class IocScriptStaticList(_IocInfoFinder): def __post_init__(self, script_list: List[IocInfoDict]): for fn in script_list: - self.add_or_update_entry(IocMetadata.from_filename(fn)) + self.add_or_update_entry(IocMetadata.from_file(fn)) diff --git a/whatrecord/makefile.py b/whatrecord/makefile.py new file mode 100644 index 00000000..2a2b6d08 --- /dev/null +++ b/whatrecord/makefile.py @@ -0,0 +1,611 @@ +from __future__ import annotations + +import functools +import logging +import os +import pathlib +import shutil +import subprocess +import textwrap +from dataclasses import dataclass, field +from typing import Dict, List, Optional, Set, Union + +import apischema +import graphviz as gv + +from .common import AnyPath +from .graph import _GraphHelper +from .util import lines_between + +logger = logging.getLogger(__name__) + + +class MakeNotInstalled(RuntimeError): + ... 
+ + +_section_start_marker = "--whatrecord-section-start--" +_section_end_marker = "--whatrecord-section-end--" +_whatrecord_target = "_whatrecord_target" + +_make_helper: str = fr""" + +# Trick borrowed from epics-sumo; thanks! +.EXPORT_ALL_VARIABLES: +# per GNU Make's documentation: +# Simply by being mentioned as a target, this +# tells make to export all variables to child processes by default. See +# Communicating Variables to a Sub-make. + +{_whatrecord_target}: + # This is the environment section; null-delimited list of env vars + @echo "{_section_start_marker}env" + @env -0 + @echo "{_section_end_marker}" + # This is the make meta information section, as specified by make itself + @echo "{_section_start_marker}default_goal" + @echo "$(.DEFAULT_GOAL)" + @echo "{_section_end_marker}" + @echo "{_section_start_marker}makefile_list" + @echo "$(MAKEFILE_LIST)" + @echo "{_section_end_marker}" + @echo "{_section_start_marker}make_features" + @echo "$(.FEATURES)" + @echo "{_section_start_marker}include_dirs" + @echo "$(.INCLUDE_DIRS)" + @echo "{_section_end_marker}" +""".replace(" ", "\t") + + +@functools.lru_cache(maxsize=None) +def host_has_make() -> bool: + """Does the host have ``make`` required to use this module?""" + return shutil.which("make") is not None + + +@dataclass +class Makefile: + """ + Makefile information as determined by ``make`` itself. + + Makes some assumptions about variables typically used in EPICS build + environments, but should fill in generic information for all non-EPICS + makefiles as well. + + Will not work if: + * ``.RECIPEPREFIX`` is set to anything but tab in the makefile, however + uncommon that may be. + """ + + #: Environment variable name to value. + env: Dict[str, str] = field(default_factory=dict) + #: .DEFAULT_GOAL, or the default ``make`` target. + default_goal: str = "" + #: .MAKEFILE_LIST, or the makefiles included in the build. 
+ makefile_list: List[str] = field(default_factory=list) + #: .FEATURES, features supported by make + make_features: Set[str] = field(default_factory=set) + #: .INCLUDE_DIRS, include directories + include_dirs: List[str] = field(default_factory=list) + #: BUILD_ARCHS + build_archs: List[str] = field(default_factory=list) + #: CROSS_COMPILER_HOST_ARCHS + cross_compiler_host_archs: List[str] = field(default_factory=list) + #: CROSS_COMPILER_TARGET_ARCHS + cross_compiler_target_archs: List[str] = field(default_factory=list) + #: epics-base version. + base_version: Optional[str] = None + #: epics-base configure path. + base_config_path: Optional[pathlib.Path] = None + #: Variables defined in RELEASE_TOPS + release_top_vars: List[str] = field(default_factory=list) + #: The Makefile filename, if available. + filename: Optional[pathlib.Path] = None + #: The working directory used when invoking make: + working_directory: pathlib.Path = field(default_factory=pathlib.Path) + + def find_release_paths(self, check: bool = True) -> Dict[str, pathlib.Path]: + """ + Find paths defined in RELEASE. + + Parameters + ---------- + check : bool, optional + Check that the release path includes a ``configure`` directory and + a ``Makefile``. + """ + # TODO: are the checks here appropriate? Perhaps just a simple + # ``path/Makefile`` check is sufficient for the build system. + results = {} + for var in self.release_top_vars: + value = self.env.get(var, "").strip() + if not value: + continue + + # Assume it's not for windows, for now. Can't instantiate + # WindowsPath on linux anyway: + try: + path = (self.working_directory / value).resolve() + if not check or (path / "Makefile").is_file(): + results[var] = path + except Exception: + ... 
+ + return results + + @classmethod + def _get_section(cls, output: str, section: str) -> str: + """Get a single make output section.""" + return "\n".join( + lines_between( + output, + start_marker=_section_start_marker + section, + end_marker=_section_end_marker, + include_blank=False, + ) + ).strip() + + @classmethod + def _get_env(cls, output: str, keep_os_env: bool = False) -> Dict[str, str]: + """Get environment variables from make output.""" + env = {} + for line in sorted(cls._get_section(output, "env").split("\0")): + if "=" in line: + variable, value = line.split("=", 1) + if not keep_os_env and os.environ.get(variable) == value: + ... + else: + env[variable] = value + return env + + @classmethod + def _get_make_vars(cls, output: str) -> Dict[str, str]: + """Get environment variables from make output.""" + makevars = { + var: cls._get_section(output, var) + for var in ( + "default_goal", + "makefile_list", + "make_features", + "include_dirs", + ) + } + + if makevars.get("default_goal", None) == _whatrecord_target: + # This means there's no default goal, and ours is the first + makevars["default_goal"] = "" + + return makevars + + @classmethod + def _from_make_output( + cls, + output: str, + working_directory: AnyPath, + filename: Optional[AnyPath] = None, + keep_os_env: bool = False, + ) -> Makefile: + """ + Parse ``make`` output with our helper target attached. 
+ """ + if filename is not None: + filename = pathlib.Path(filename) + + env = cls._get_env(output, keep_os_env=keep_os_env) + make_vars = cls._get_make_vars(output) + return cls( + env=env, + filename=filename, + default_goal=make_vars.get("default_goal", ""), + makefile_list=make_vars.get("makefile_list", "").split(), + make_features=set(make_vars.get("make_features", "").split()), + include_dirs=make_vars.get("include_dirs", "").split(), + build_archs=env.get("BUILD_ARCHS", "").split(), + cross_compiler_host_archs=env.get("CROSS_COMPILER_HOST_ARCHS", "").split(), + cross_compiler_target_archs=env.get("CROSS_COMPILER_TARGET_ARCHS", "").split(), + base_version=env.get("BASE_MODULE_VERSION", ""), + base_config_path=pathlib.Path(env["CONFIG"]) if "CONFIG" in env else None, + release_top_vars=env.get("RELEASE_TOPS", "").split(), + working_directory=pathlib.Path(working_directory), + ) + + @classmethod + def from_string( + cls, + contents: str, + filename: Optional[AnyPath] = None, + working_directory: Optional[AnyPath] = None, + keep_os_env: bool = False, + encoding: str = "utf-8", + ) -> Makefile: + """ + Get Makefile information given its contents. + + Parameters + ---------- + contents : str + The Makefile contents. + + filename : pathlib.Path or str, optional + The filename. + + working_directory : pathlib.Path or str, optional + The working directory to use when evaluating the Makefile contents. + Assumed to be the directory in which the makefile is contained, but + this may be overridden. If the filename is unavailable, the + fallback is the current working directory. + + keep_os_env : bool, optional + Keep environment variables in ``.env`` from outside of ``make``, + as in those present in ``os.environ`` when executing ``make``. + + encoding : str, optional + String encoding to use. + + Raises + ------ + RuntimeError + If unable to run ``make`` and get information. + + Returns + ------- + makefile : Makefile + The makefile information. 
+ """ + if not host_has_make(): + raise MakeNotInstalled("Host does not have ``make`` installed.") + + full_contents = "\n".join((contents, _make_helper)) + if logger.isEnabledFor(logging.DEBUG): + logger.debug( + "New makefile contents: %s", + full_contents.replace("\t", "(tab) "), + ) + + if working_directory is None: + if filename is not None: + working_directory = pathlib.Path(filename).resolve().parent + else: + working_directory = pathlib.Path.cwd() + + env = dict(os.environ) + # Shell updates this variable and Makefiles may rely on it: + env["PWD"] = str(working_directory) + + result = subprocess.run( + ["make", "--silent", "--file=-", _whatrecord_target], + input=full_contents.encode(encoding), + cwd=working_directory, + capture_output=True, + env=env, + ) + stdout = result.stdout.decode(encoding, "replace") + if logger.isEnabledFor(logging.DEBUG): + stderr = result.stderr.decode(encoding, "replace") + logger.debug( + "make output:\n%s\nmake stderr:\n%s", + textwrap.indent(stdout, " "), + textwrap.indent(stderr, " ") + ) + + return cls._from_make_output( + stdout, working_directory=working_directory, filename=filename + ) + + @classmethod + def from_file_obj( + cls, + fp, + filename: Optional[AnyPath] = None, + working_directory: Optional[AnyPath] = None, + keep_os_env: bool = False, + encoding: str = "utf-8", + ) -> Makefile: + """ + Load a Makefile from a file object. + + Parameters + ---------- + fp : file-like object + The file-like object to read from. + + filename : pathlib.Path or str, optional + The filename, defaults to ``fp.name`` if available. + + working_directory : pathlib.Path or str, optional + The working directory to use when evaluating the Makefile contents. + Assumed to be the directory in which the makefile is contained, but + this may be overridden. If the filename is unavailable, the + fallback is the current working directory. 
+ + keep_os_env : bool, optional + Keep environment variables in ``.env`` from outside of ``make``, + as in those present in ``os.environ`` when executing ``make``. + + encoding : str, optional + String encoding to use. + + Raises + ------ + RuntimeError + If unable to run ``make`` and get information. + + Returns + ------- + makefile : Makefile + The makefile information. + """ + return cls.from_string( + fp.read(), + filename=filename or getattr(fp, "name", None), + working_directory=working_directory, + encoding=encoding, + keep_os_env=keep_os_env, + ) + + @classmethod + def from_file( + cls, + filename: AnyPath, + working_directory: Optional[AnyPath] = None, + keep_os_env: bool = False, + encoding: str = "utf-8", + ) -> Makefile: + """ + Load a Makefile from a filename. + + Parameters + ---------- + filename : pathlib.Path or str + The filename. + + working_directory : pathlib.Path or str, optional + The working directory to use when evaluating the Makefile contents. + Assumed to be the directory in which the makefile is contained, but + this may be overridden. + + keep_os_env : bool, optional + Keep environment variables in ``.env`` from outside of ``make``, + as in those present in ``os.environ`` when executing ``make``. + + encoding : str, optional + String encoding to use. + + Raises + ------ + RuntimeError + If unable to run ``make`` and get information. + + Returns + ------- + makefile : Makefile + The makefile information. + """ + with open(filename, "rt") as fp: + contents = fp.read() + return cls.from_string( + contents, + filename=filename, + working_directory=working_directory, + encoding=encoding, + keep_os_env=keep_os_env, + ) + + @staticmethod + def find_makefile( + file_or_directory: AnyPath, + ) -> pathlib.Path: + """ + Find a Makefile given a path, if it exists. + + Accepts either a direct path to a Makefile or a directory containing + a Makefile. 
+ + Parameters + ---------- + file_or_directory : str or pathlib.Path + Makefile directory or direct path. + + Raises + ------ + FileNotFoundError + If the file or directory does not contain a ``Makefile``. + """ + path = pathlib.Path(file_or_directory) + + if path.is_dir(): + path = path.resolve() / "Makefile" + + if not path.exists(): + raise FileNotFoundError(file_or_directory) + + return path.resolve() + + +@dataclass +class Dependency: + """A single EPICS dependency (module) in the build system.""" + #: The name of the dependency, as derived from the path name + name: str = "" + #: The name of the dependency, as derived from the build system variable + variable_name: Optional[str] = None + #: The absolute path to the dependency + path: pathlib.Path = field(default_factory=pathlib.Path) + #: The parsed Makefile information for this dependency + makefile: Makefile = field(default_factory=Makefile) + #: Modules (etc.) which depend on this instance + dependents: List[pathlib.Path] = field(default_factory=list) + #: Modules (etc.) are required for this instance + dependencies: List[pathlib.Path] = field(default_factory=list) + #: The "root" node. ``None`` to refer to itself. 
@dataclass
class DependencyGroup:
    """
    IOC (or support module) dependency information.

    This differs from a single :class:`Dependency` in that it also records
    every module encountered while recursively scanning from the root module
    (see ``all_modules``).
    """
    #: The primary IOC or module this refers to.
    root: pathlib.Path
    #: All modules found while scanning for the root one.
    all_modules: Dict[pathlib.Path, Dependency] = field(default_factory=dict)

    @classmethod
    def from_makefile(
        cls,
        makefile: Makefile,
        recurse: bool = True,
        *,
        keep_os_env: bool = False,
        name: Optional[str] = None,
    ) -> DependencyGroup:
        """
        Create a DependencyGroup from a parsed root ``Makefile``.

        Parameters
        ----------
        makefile : Makefile
            The root makefile; must have a ``filename``.
        recurse : bool, optional
            Recurse into dependencies found via RELEASE paths.
        keep_os_env : bool, optional
            Keep ``os.environ`` variables when evaluating sub-makefiles.
        name : str, optional
            Name for the root dependency.

        Raises
        ------
        ValueError
            If ``makefile`` has no associated path.
        """
        if makefile.filename is None:
            raise ValueError("The provided Makefile must have a path")

        info = cls(root=makefile.filename.parent, all_modules={})
        # The following will implicitly be inserted into all_modules
        Dependency.from_makefile(
            makefile=makefile,
            name=name,
            keep_os_env=keep_os_env,
            root=info,
            recurse=recurse,
        )
        return info

    def as_graph(self, **kwargs) -> DependencyGroupGraph:
        """
        Create a graphviz digraph of the dependencies.

        Returns
        -------
        graph : DependencyGroupGraph
        """
        return DependencyGroupGraph(self, **kwargs)


class DependencyGroupGraph(_GraphHelper):
    """
    A graph for a group of dependencies.

    Parameters
    ----------
    dep : Dependency or DependencyGroup, optional
        A dependency (or group of them) to add.

    highlight_deps : list of str, optional
        List of dependency (node label) names to highlight.

    include_code : bool, optional
        Include code, where relevant, in nodes.  Currently stored but not
        otherwise used by this graph type.
    """

    # NOTE(review): these labels appear to be vestigial (copied from the
    # state-notation graph helper?) — confirm whether _GraphHelper needs them.
    _entry_label: str = "_Entry_"
    _exit_label: str = "_Exit_"

    def __init__(
        self,
        dep: Optional[Union[Dependency, DependencyGroup]] = None,
        highlight_deps: Optional[List[str]] = None,
        include_code: bool = False,
    ):
        super().__init__()
        self.include_code = include_code
        self.highlight_deps = highlight_deps or []
        if dep is not None:
            self.add_dependency(dep)

    def add_dependency(self, item: Union[DependencyGroup, Dependency]):
        """Add a dependency (or a whole group) to the graph, recursively."""
        if isinstance(item, DependencyGroup):
            for path, mod in item.all_modules.items():
                self.add_dependency(mod)
            return

        if item.path in self.nodes:
            # Already added; avoid infinite recursion on dependency cycles.
            return

        if item.variable_name != item.name:
            node_text = f"{item.variable_name} {item.name}\n{item.path}"
        else:
            node_text = f"{item.variable_name}\n{item.path}"

        self.get_node(str(item.path), text=node_text)

        if item.root is None:
            # Misconfiguration?
            return

        for dep_path in item.dependencies:
            dep = item.root.all_modules[dep_path]
            self.add_dependency(dep)
            self.add_edge(str(item.path), str(dep.path))

    def _ready_for_digraph(self, graph: gv.Digraph):
        """Hook when the user calls ``to_digraph``."""
        for node in self.nodes.values():
            node.highlighted = node.label in self.highlight_deps
logger = logging.getLogger(__name__)


#: Every type that :func:`parse` may return, keyed on the input file format.
ParseResult = Union[
    AccessSecurityConfig,
    Database,
    GatewayPVList,
    LinterResults,
    LoadedIoc,
    SequencerProgram,
    StreamProtocol,
    TemplateSubstitution,
    Makefile,
]


def parse(
    filename: AnyPath,
    dbd: Optional[str] = None,
    standin_directories: Optional[Dict[str, str]] = None,
    macros: Optional[str] = None,
    use_gdb: bool = False,
    format: Optional[FileFormat] = None,
    expand: bool = False,
    v3: bool = False,
) -> ParseResult:
    """
    Generically parse either a startup script or a database file.

    Hopefully does the right thing based on file extension.  If not, use
    the ``format`` kwarg to specify it directly.

    Parameters
    ----------
    filename : str or pathlib.Path
        The filename to parse.

    dbd : str or pathlib.Path, optional
        The associated database definition file, if parsing a database or
        substitutions file.

    standin_directories : dict, optional
        Rewrite hard-coded directory prefixes by setting::

            standin_directories = {"/replace_this/": "/with/this"}

    macros : str, optional
        Macro string to use when parsing the file.

    use_gdb : bool, optional
        When parsing a startup script, use gdb to gather binary information
        about the IOC executable.

    format : FileFormat, optional
        Explicit file format, overriding detection from the filename.

    expand : bool, optional
        Expand a substitutions file.

    v3 : bool, optional
        Use V3 database grammar where applicable.

    Raises
    ------
    RuntimeError
        If the detected/provided format is unsupported here.
    """
    standin_directories = standin_directories or {}

    filename = pathlib.Path(filename)

    # The shared macro context - used in different ways below:
    macro_context = MacroContext(macro_string=macros or "")

    if format is None:
        format = FileFormat.from_filename(filename)

    if format in (FileFormat.database, FileFormat.database_definition):
        if format == FileFormat.database_definition or not dbd:
            return Database.from_file(
                filename, macro_context=macro_context, version=3 if v3 else 4
            )
        return LinterResults.from_database_file(
            db_filename=filename,
            dbd=Database.from_file(dbd, version=3 if v3 else 4),
            macro_context=macro_context
        )

    if format == FileFormat.iocsh:
        md = IocMetadata.from_file(
            filename,
            standin_directories=standin_directories,
            macros=dict(macro_context),
        )
        if use_gdb:
            try:
                asyncio.run(md.get_binary_information())
            except KeyboardInterrupt:
                logger.info("Skipping gdb information...")

        return LoadedIoc.from_metadata(md)

    if format == FileFormat.substitution:
        template = TemplateSubstitution.from_file(filename)
        if not expand:
            return template

        database_text = template.expand_files()
        # It's technically possible that this *isn't* a database file; so
        # perhaps a `whatrecord msi` could be implemented in the future.
        return Database.from_string(
            database_text,
            macro_context=macro_context,
            dbd=Database.from_file(dbd) if dbd is not None else None,
            filename=filename,
            version=3 if v3 else 4,
        )

    if format == FileFormat.state_notation:
        return SequencerProgram.from_file(filename)

    with open(filename, "rt") as fp:
        contents = fp.read()

    if macros:
        contents = macro_context.expand_file(contents)

    if format == FileFormat.gateway_pvlist:
        return GatewayPVList.from_string(contents, filename=filename)

    if format == FileFormat.access_security:
        return AccessSecurityConfig.from_string(contents, filename=filename)

    if format == FileFormat.stream_protocol:
        return StreamProtocol.from_string(contents, filename=filename)

    if format == FileFormat.makefile:
        return Makefile.from_string(contents, filename=filename)

    raise RuntimeError(
        f"Sorry, whatrecord doesn't support the {format!r} format just yet in the "
        f"CLI parsing tool. Please open an issue."
    )
# Marker for tests that shell out to ``make``: skip them when the host
# does not have a usable ``make`` executable installed.
skip_without_make = pytest.mark.skipif(
    not makefile.host_has_make(), reason="Host does not have make"
)
"""Tests for ``whatrecord.makefile`` Makefile / dependency introspection."""
import dataclasses
import pathlib
import subprocess
import textwrap
from typing import Optional, Set, Tuple

import pytest

from .. import makefile
from .conftest import MODULE_PATH, skip_without_make

DEPS_MAKEFILE_ROOT = MODULE_PATH / "deps"

# More coverage on debug log lines
makefile.logger.setLevel("DEBUG")


def prune_result(
    result: makefile.Makefile,
    expected: makefile.Makefile,
    to_keep: Optional[Set[str]] = None,
) -> None:
    """
    Prune resulting Makefile in-place for the purposes of testing.

    * Remove extra environment variables that we don't want here
    * Reset every field not named in ``to_keep`` (aside from ``env`` and
      ``filename``) to the expected value, so the equality check only
      exercises the fields under test

    Parameters
    ----------
    result : makefile.Makefile
        The parsed Makefile to prune.
    expected : makefile.Makefile
        The reference Makefile to compare against.
    to_keep : set of str, optional
        Field names to leave untouched on ``result``.
    """
    # FIX: ``to_keep`` is documented as optional but was used directly in
    # ``in`` checks below, raising TypeError when left as None.
    if to_keep is None:
        to_keep = set()

    for key in set(result.env) - set(expected.env):
        result.env.pop(key)

    ignore = {"env", "filename"}
    for field in dataclasses.fields(makefile.Makefile):
        if field.name not in to_keep and field.name not in ignore:
            setattr(result, field.name, getattr(expected, field.name))


def get_makefile(contents: str, *, set_filename: bool = True) -> makefile.Makefile:
    """Get a Makefile instance given its contents."""
    # Make recipes require hard tabs; the triple-quoted test sources use
    # spaces for readability.
    # NOTE(review): reconstructed as 4-space -> tab; confirm the exact
    # whitespace string against the original source.
    contents = textwrap.dedent(contents).replace("    ", "\t")
    print("Creating Makefile from contents:")
    print(contents)
    make = makefile.Makefile.from_string(
        contents,
        filename=DEPS_MAKEFILE_ROOT / "Makefile.made_up" if set_filename else None,
    )
    if set_filename:
        # Make the object more realistic
        make.name = "ioc"
        make.variable_name = "ioc_var"
    return make


def get_dependency_group(
    contents: str, *, set_filename: bool = True
) -> Tuple[makefile.Makefile, makefile.DependencyGroup]:
    """Get a DependencyGroup instance given a single Makefile's contents."""
    root = get_makefile(contents, set_filename=set_filename)
    group = makefile.DependencyGroup.from_makefile(root)
    if set_filename:
        assert root.filename is not None
        assert group.root == root.filename.parent
    return root, group


def check_module_in_group(
    group: makefile.DependencyGroup, variable_name: str, path: pathlib.Path
) -> makefile.Dependency:
    """Check if a module is present in a DependencyGroup."""
    if path not in group.all_modules:
        paths = ", ".join(str(path) for path in group.all_modules)
        raise ValueError(
            f"Module {variable_name} not found in dependency list. "
            f"Paths: {paths}"
        )
    module = group.all_modules[path]
    assert module.variable_name == variable_name
    return module


@skip_without_make
def test_make_version():
    # Smoke test: ensure ``make`` itself runs on this host.
    version = subprocess.check_output(["make", "--version"])
    print(version)


@skip_without_make
@pytest.mark.parametrize(
    "contents, to_keep, expected",
    [
        pytest.param(
            """
            WHATREC_A=B
            WHATREC_C?=D
            """,
            set(),
            makefile.Makefile(
                env={"WHATREC_A": "B", "WHATREC_C": "D"},
                filename=None,
            ),
            id="simple-subst-0",
        ),
        pytest.param(
            """
            WHATREC_A=A
            WHATREC_B=$(WHATREC_A)
            WHATREC_C?=C
            """,
            set(),
            makefile.Makefile(
                env={"WHATREC_A": "A", "WHATREC_B": "A", "WHATREC_C": "C"},
                filename=None,
            ),
            id="simple-subst-1",
        ),
        pytest.param(
            """
            ENV_VAR=A

            all:
                echo "Hi!"
            """,
            {"default_goal", },
            makefile.Makefile(
                env={"ENV_VAR": "A"},
                default_goal="all",
                filename=None,
            ),
            id="default-goal",
        ),
        pytest.param(
            # These would typically be set by the epics build system; but let's
            # set them ourselves as this is just a test of our stuff and not
            # the EPICS build system:
            """
            BUILD_ARCHS=arch1 arch2
            CROSS_COMPILER_HOST_ARCHS=arch3 arch4
            CROSS_COMPILER_TARGET_ARCHS=arch5 arch6
            BASE_MODULE_VERSION=R7.0.2
            CONFIG=/
            """,
            {
                "build_archs",
                "cross_compiler_host_archs",
                "cross_compiler_target_archs",
                "base_version",
                "base_config_path",
            },
            makefile.Makefile(
                env={},
                build_archs=["arch1", "arch2"],
                cross_compiler_host_archs=["arch3", "arch4"],
                cross_compiler_target_archs=["arch5", "arch6"],
                base_version="R7.0.2",
                base_config_path=pathlib.Path("/"),
                filename=None,
            ),
            id="epics-build-system-vars",
        ),
    ]
)
def test_from_contents(contents: str, expected: makefile.Makefile, to_keep: Set[str]):
    result = get_makefile(contents, set_filename=False)
    print(result)
    prune_result(result, expected, to_keep=to_keep)
    assert result == expected


@skip_without_make
@pytest.mark.parametrize(
    "contents",
    [
        pytest.param(
            f"""
            EPICS_BASE={DEPS_MAKEFILE_ROOT}/base
            RELEASE_TOP=EPICS_BASE
            """,
            id="initial",
        ),
    ]
)
def test_dependency_norecurse_smoke(contents: str):
    make = get_makefile(contents)
    dep = makefile.Dependency.from_makefile(make, name="name", variable_name="var")
    assert dep.makefile == make
    assert dep.name == "name"
    assert dep.variable_name == "var"


@skip_without_make
def test_dependency_group_base_only():
    _, group = get_dependency_group(
        f"""
        EPICS_BASE={DEPS_MAKEFILE_ROOT}/base
        RELEASE_TOPS=EPICS_BASE
        """
    )

    assert len(group.all_modules) == 2

    base = check_module_in_group(group, "EPICS_BASE", DEPS_MAKEFILE_ROOT / "base")
    # I'm not convinced this is the right behavior, but not sure if we can
    # do better generically:
    assert base.name == "EPICS_BASE"


@skip_without_make
def test_dependency_group_base_and_module_a():
    _, group = get_dependency_group(
        f"""
        EPICS_BASE={DEPS_MAKEFILE_ROOT}/base
        MODULE_A={DEPS_MAKEFILE_ROOT}/module_a
        RELEASE_TOPS=EPICS_BASE MODULE_A
        """
    )

    assert len(group.all_modules) == 3
    check_module_in_group(group, "EPICS_BASE", DEPS_MAKEFILE_ROOT / "base")
    check_module_in_group(group, "MODULE_A", DEPS_MAKEFILE_ROOT / "module_a")


@skip_without_make
def test_dependency_group_base_and_layers():
    _, group = get_dependency_group(
        f"""
        EPICS_BASE={DEPS_MAKEFILE_ROOT}/base
        MODULE_C={DEPS_MAKEFILE_ROOT}/module_c
        RELEASE_TOPS=EPICS_BASE MODULE_C
        """
    )

    # test suite -> module_c -> module_a -> base
    #            |           -> module_b -> base
    #            -> base
    #

    base = check_module_in_group(group, "EPICS_BASE", DEPS_MAKEFILE_ROOT / "base")
    ma = check_module_in_group(group, "MODULE_A", DEPS_MAKEFILE_ROOT / "module_a")
    mb = check_module_in_group(group, "MODULE_B", DEPS_MAKEFILE_ROOT / "module_b")
    mc = check_module_in_group(group, "MODULE_C", DEPS_MAKEFILE_ROOT / "module_c")
    ioc_path = DEPS_MAKEFILE_ROOT  # <-- test suite pseudo-makefile
    assert set(group.all_modules) == {
        ioc_path,
        base.path,
        ma.path,
        mb.path,
        mc.path,
    }
    assert len(group.all_modules) == 5

    # Base
    assert set(base.dependents) == {ma.path, mb.path, mc.path, ioc_path}
    assert len(base.dependents) == 4
    assert len(base.dependencies) == 0

    # module a
    assert set(ma.dependencies) == {base.path}
    assert len(ma.dependencies) == 1
    assert set(ma.dependents) == {mc.path}
    assert len(ma.dependents) == 1

    # module b
    assert set(mb.dependencies) == {base.path}
    # FIX: previously re-checked ``len(mb.dependents)`` here; the intent
    # (parallel to the "module a" section) is clearly the dependency count.
    assert len(mb.dependencies) == 1
    assert set(mb.dependents) == {mc.path}
    assert len(mb.dependents) == 1

    # module c
    assert set(mc.dependencies) == {base.path, ma.path, mb.path}
    assert len(mc.dependencies) == 3
    assert set(mc.dependents) == {ioc_path}
    assert len(mc.dependents) == 1


@skip_without_make
def test_dependency_group_graph():
    _, group = get_dependency_group(
        f"""
        EPICS_BASE={DEPS_MAKEFILE_ROOT}/base
        MODULE_C={DEPS_MAKEFILE_ROOT}/module_c
        RELEASE_TOPS=EPICS_BASE MODULE_C
        """
    )
    graph = group.as_graph()
    assert len(graph.nodes) == 5

    base = check_module_in_group(group, "EPICS_BASE", DEPS_MAKEFILE_ROOT / "base")
    ma = check_module_in_group(group, "MODULE_A", DEPS_MAKEFILE_ROOT / "module_a")
    mb = check_module_in_group(group, "MODULE_B", DEPS_MAKEFILE_ROOT / "module_b")
    mc = check_module_in_group(group, "MODULE_C", DEPS_MAKEFILE_ROOT / "module_c")

    edge_pairs = list(graph.edge_pairs)

    def check_edge(source: makefile.Dependency, dest: makefile.Dependency):
        # Edge direction: dependent module -> the module it depends on.
        node_source = graph.get_node(str(source.path))
        node_dest = graph.get_node(str(dest.path))
        assert (node_source, node_dest) in edge_pairs

    check_edge(source=ma, dest=base)
    check_edge(source=mb, dest=base)
    check_edge(source=mc, dest=base)
    check_edge(source=mc, dest=ma)
    check_edge(source=mc, dest=mb)

    # Smoke test to_digraph
    graph.to_digraph()
def lines_between(
    text: str,
    start_marker: str,
    end_marker: str,
    *,
    include_blank: bool = False
) -> Generator[str, None, None]:
    """
    From a block of text, yield all lines between `start_marker` and
    `end_marker`.

    Marker comparison is case-insensitive, and each line of ``text`` is
    stripped of surrounding whitespace before being compared.  The marker
    lines themselves are never yielded; iteration stops at the first
    end-marker line after the start marker has been seen.

    Parameters
    ----------
    text : str
        The block of text
    start_marker : str
        The block-starting marker to match
    end_marker : str
        The block-ending marker to match
    include_blank : bool, optional
        If True, also yield blank lines found between the markers.
        By default, blank lines are skipped.

    Yields
    ------
    line : str
        Line of text found between the markers.
    """
    found_start = False
    start_marker = start_marker.lower()
    end_marker = end_marker.lower()
    for line in text.splitlines():
        line_lowercase = line.strip().lower()
        if line_lowercase == start_marker:
            found_start = True
        elif found_start:
            if line_lowercase == end_marker:
                break
            elif line_lowercase or include_blank:
                yield line