Merge pull request #215 from LeStarch/release/release-1.5.0
Release/release 1.5.0
Showing 4,733 changed files with 712,312 additions and 25,919 deletions.
@@ -1,42 +1,40 @@
-'''
+"""
 NAME: JSONDictonaryGen.py
 DESCRIPTION: Reads topology XML to produce command, EVR, and channel JSON
     dictionaries
 AUTHOR: Aaron Doubek-Kraft [email protected]
-'''
+"""
 
-import os
-import logging
 import json
+import logging
+import os
 import sys
 
 from optparse import OptionParser
 
-from fprime_ac.models import ModelParser
-from fprime_ac.parsers import XmlTopologyParser
-from fprime_ac.parsers import XmlSerializeParser
+from fprime_ac.parsers import XmlSerializeParser, XmlTopologyParser
 from fprime_ac.utils import Logger
+from fprime_ac.utils.buildroot import get_build_roots, set_build_roots
 
 
 # Version label for now
 class Version:
-    id = "0.1"
+    id = "0.1"
     comment = "Initial prototype"
 
 
 VERSION = Version()
 
 
 def format_type_item(typeItem):
-    typeIsTuple = (str(type(typeItem)) == '<type \'tuple\'>')
+    typeIsTuple = str(type(typeItem)) == "<type 'tuple'>"
    if typeIsTuple:
         description = typeItem[0]
         values = typeItem[1]
         valuesArr = []
         for value in values:
-            valuesArr.append({
-                "name": value[0],
-                "value": value[1],
-                "comment": value[2]
-            })
+            valuesArr.append({"name": value[0], "value": value[1], "comment": value[2]})
 
         typeObj = {
             "type": description[0],
@@ -48,6 +46,7 @@ def format_type_item(typeItem):
 
     return typeObj
 
+
 def pinit():
     """
     Initialize the option parser and return it.
@@ -57,25 +56,39 @@ def pinit():
 
     usage = "usage: %prog [options] [xml_filename]"
     vers = "%prog " + VERSION.id + " " + VERSION.comment
-    program_longdesc = '''
+    program_longdesc = """
 This script reads F' topology XML and produces dictionaries represented as
 JSON. These documents contain all command, evr, and channel telemetry
 descriptions.
-    '''
+    """
     program_license = "Copyright 2018 aarondou (California Institute of Technology) \
     ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged."
 
-    parser = OptionParser(usage, version=vers, epilog=program_longdesc,description=program_license)
+    parser = OptionParser(
+        usage, version=vers, epilog=program_longdesc, description=program_license
+    )
 
-    parser.add_option("-p", "--path", dest="work_path", type="string",
-                      help="Switch to new working directory (def: %s)." % current_dir,
-                      action="store", default=current_dir)
+    parser.add_option(
+        "-p",
+        "--path",
+        dest="work_path",
+        type="string",
+        help="Switch to new working directory (def: %s)." % current_dir,
+        action="store",
+        default=current_dir,
+    )
 
-    parser.add_option("-L", "--logger-output-file", dest="logger_output",
-                      default=None, help="Set the logger output file. (def: stdout).")
+    parser.add_option(
+        "-L",
+        "--logger-output-file",
+        dest="logger_output",
+        default=None,
+        help="Set the logger output file. (def: stdout).",
+    )
 
     return parser
 
+
 def main():
 
     parser = pinit()
@@ -84,8 +97,7 @@ def main():
     Logger.connectOutputLogger(opts.logger_output)
 
     # Global logger init. below.
-    PRINT = logging.getLogger('output')
-    DEBUG = logging.getLogger('debug')
+    PRINT = logging.getLogger("output")
 
     #
     # Parse the input Component XML file and create internal meta-model
@@ -98,14 +110,16 @@ def main():
     else:
         xmlFilename = args[0]
 
-    # Check for BUILD_ROOT env. variable
-    if ('BUILD_ROOT' in list(os.environ.keys())) == False:
-        PRINT.info("ERROR: The -b command option requires that BUILD_ROOT environmental variable be set to root build path...")
-        sys.exit(-1)
+    #
+    # Check for BUILD_ROOT variable for XML port searches
+    #
+    if not opts.build_root_overwrite is None:
+        set_build_roots(opts.build_root_overwrite)
     else:
-        BUILD_ROOT = os.environ['BUILD_ROOT']
-        ModelParser.BUILD_ROOT = BUILD_ROOT
-        PRINT.info("BUILD_ROOT set to %s in environment" % BUILD_ROOT)
+        if ("BUILD_ROOT" in os.environ.keys()) == False:
+            print("ERROR: Build root not set to root build path...")
+            sys.exit(-1)
+        set_build_roots(os.environ["BUILD_ROOT"])
 
     parsedTopology = XmlTopologyParser.XmlTopologyParser(xmlFilename)
     deployment = parsedTopology.get_deployment()
@@ -118,38 +132,51 @@ def main():
         "events": {},
         "channels": {},
         "commands": {},
-        "serializables": {}
+        "serializables": {},
     }
 
     events = dictionary[deployment]["events"]
     channels = dictionary[deployment]["channels"]
    commands = dictionary[deployment]["commands"]
     serializables = dictionary[deployment]["serializables"]
-    limitLabels = ["low_red", "low_orange", "low_yellow", "high_yellow", "high_orange", "high_red"]
+    limitLabels = [
+        "low_red",
+        "low_orange",
+        "low_yellow",
+        "high_yellow",
+        "high_orange",
+        "high_red",
+    ]
     unitLabels = ["label", "gain", "offset"]
 
-    instanceIDs = {
-        "events": {},
-        "channels": {},
-        "commands": {}
-    }
+    instanceIDs = {"events": {}, "channels": {}, "commands": {}}
 
     for inst in parsedTopology.get_instances():
         serializableFilenames = inst.get_comp_xml().get_serializable_type_files()
         for filename in serializableFilenames:
-            parsedSerializable = XmlSerializeParser.XmlSerializeParser(BUILD_ROOT + '/' + filename)
+            for build_root in get_build_roots():
+                if os.path.exists(os.path.join(build_root, filename)):
+                    break
+            else:
+                raise FileNotFoundError(os.path.join(build_root, filename))
+
+            parsedSerializable = XmlSerializeParser.XmlSerializeParser(
+                os.path.join(build_root, filename)
+            )
             name = parsedSerializable.get_name()
             namespace = parsedSerializable.get_namespace()
             members = []
             membersRaw = parsedSerializable.get_members()
             for member in membersRaw:
-                members.append({
-                    "name": member[0],
-                    "type": format_type_item(member[1]),
-                    "size": member[2],
-                    "format_string": member[3],
-                    "comment": member[4]
-                })
+                members.append(
+                    {
+                        "name": member[0],
+                        "type": format_type_item(member[1]),
+                        "size": member[2],
+                        "format_string": member[3],
+                        "comment": member[4],
+                    }
+                )
 
             metadata = {
                 "name": name,
@@ -163,7 +190,7 @@ def main():
         comp_namespace = inst.get_namespace()
         component = "::".join([comp_namespace, comp_type])
         base_id = inst.get_base_id()
-        if '0x' in base_id:
+        if "0x" in base_id:
             base_id = int(base_id, 16)
         else:
             base_id = int(base_id)
@@ -173,7 +200,7 @@ def main():
         if "get_commands" in comp_dir:
             for command in comp_parser.get_commands():
                 opcode = command.get_opcodes()[0]
-                opcode = int(opcode, 16) if ('0x' in opcode) else int(opcode)
+                opcode = int(opcode, 16) if ("0x" in opcode) else int(opcode)
                 opcode += base_id
 
                 name = command.get_mnemonic()
@@ -194,15 +221,15 @@ def main():
                     "instance": comp_name,
                     "description": command.get_comment(),
                     "component": component,
-                    "arguments" : arguments
+                    "arguments": arguments,
                 }
 
                 commands[opcode] = metadata
 
         if "get_events" in comp_dir:
             for event in comp_parser.get_events():
                 ev_id = event.get_ids()[0]
-                ev_id = int(ev_id, 16) if ('0x' in ev_id) else int(ev_id)
+                ev_id = int(ev_id, 16) if ("0x" in ev_id) else int(ev_id)
                 ev_id += base_id
 
                 name = event.get_name()
@@ -219,22 +246,22 @@ def main():
 
                 metadata = {
                     "id": ev_id,
-                    "description": event.get_comment(),
+                    "description": event.get_comment(),
                     "name": name,
                     "instance": comp_name,
                     "component": component,
                     "format_string": event.get_format_string(),
                     "severity": event.get_severity(),
                     "telem_type": "event",
-                    "arguments": arguments
+                    "arguments": arguments,
                 }
 
                 events[ev_id] = metadata
 
         if "get_channels" in comp_dir:
             for channel in comp_parser.get_channels():
                 ch_id = channel.get_ids()[0]
-                ch_id = int(ch_id, 16) if ('0x' in ch_id) else int(ch_id)
+                ch_id = int(ch_id, 16) if ("0x" in ch_id) else int(ch_id)
                 ch_id += base_id
 
                 name = channel.get_name()
@@ -248,16 +275,15 @@ def main():
                     units.append(dict(list(zip(unitLabels, unit))))
 
                 typeObj = channel.get_type()
-                type_name = ''
+                type_name = ""
                 if isinstance(typeObj, str):
                     type_name = typeObj
                 else:
-                    type_name = 'Enum'
+                    type_name = "Enum"
                     enum_dict = {}
                     for (i, enum) in enumerate(typeObj[1]):
                         enum_dict[str(i)] = enum[0]
 
-
                 metadata = {
                     "id": ch_id,
                     "name": name,
@@ -266,18 +292,18 @@ def main():
                     "telem_type": "channel",
                     "component": component,
                     "format_string": channel.get_format_string(),
-                    "limits" : dict(list(zip(limitLabels, channel.get_limits()))),
+                    "limits": dict(list(zip(limitLabels, channel.get_limits()))),
                     "type": type_name,
-                    "units": units
+                    "units": units,
                 }
 
-                if (type_name == "Enum"):
+                if type_name == "Enum":
                     metadata["enum_dict"] = enum_dict
                     metadata["format_string"] = "%s"
 
                 channels[ch_id] = metadata
 
-    #Prepend instance name to commands, events, and channels with duplicate component types
+    # Prepend instance name to commands, events, and channels with duplicate component types
     # PRINT.info(json.dumps(instanceIDs, indent=4))
     for telemetryType, idDict in list(instanceIDs.items()):
         for name, ids in list(idDict.items()):
@@ -299,12 +325,13 @@ def main():
         os.makedirs(directory)
 
     # Write JSON to file
-    outFile = open(outFilepath, 'w')
+    outFile = open(outFilepath, "w")
     outFile.write(jsonStr)
-    descriptionFile = open(descriptionFilename, 'w')
+    descriptionFile = open(descriptionFilename, "w")
     descriptionFile.write(outFilepath)
     PRINT.info("\nJSON output written to %s" % outFilepath)
     outFile.close()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
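
Most of the changes above are mechanical reformatting (double quotes, exploded call arguments, trailing commas). The main behavioral change in this file is how serializable XML files are located: instead of concatenating a single BUILD_ROOT with the filename, the script now registers roots via set_build_roots() and searches every root returned by get_build_roots(), raising FileNotFoundError when no root contains the file. Below is a minimal sketch of that for/else lookup pattern; the find_in_build_roots helper and the example values are illustrative and not part of the F´ codebase.

import os


def find_in_build_roots(filename, build_roots):
    """Return the path of filename under the first build root that contains it.

    Mirrors the for/else search added in the diff: the else branch runs only
    when the loop finishes without hitting break, i.e. no root has the file.
    """
    for build_root in build_roots:
        if os.path.exists(os.path.join(build_root, filename)):
            break
    else:
        raise FileNotFoundError(filename)
    return os.path.join(build_root, filename)


if __name__ == "__main__":
    # Illustrative values only; the real script takes its roots from
    # get_build_roots() and filenames from each component's serializable XML list.
    stdlib_dir = os.path.dirname(os.__file__)
    print(find_in_build_roots("os.py", [stdlib_dir]))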