diff --git a/stix_shifter_modules/securonix/entry_point.py b/stix_shifter_modules/securonix/entry_point.py
index 022189248..31a11d709 100644
--- a/stix_shifter_modules/securonix/entry_point.py
+++ b/stix_shifter_modules/securonix/entry_point.py
@@ -1,17 +1,17 @@
from stix_shifter_utils.utils.base_entry_point import BaseEntryPoint
-class EntryPoint(BaseEntryPoint):
+class EntryPoint(BaseEntryPoint):
- def __init__(self, connection={}, configuration={}, options={}):
- super().__init__(connection, configuration, options)
- self.set_async(True)
+    def __init__(self, connection={}, configuration={}, options={}):
+        super().__init__(connection, configuration, options)
+        self.set_async(True)
- if connection:
- self.setup_transmission_basic(connection, configuration)
+        if connection:
+            self.setup_transmission_basic(connection, configuration)
- self.add_dialect('default', default=True)
+        self.add_dialect('default', default=True)
- def get_translated_queries(self, data, query, options={}):
+    def get_translated_queries(self, data, query, options={}):
# This returns the raw query, bypassing the STIX parsing
- return [query]
\ No newline at end of file
+        return [query]
\ No newline at end of file
diff --git a/stix_shifter_modules/securonix/stix_translation/json/from_stix_map.json b/stix_shifter_modules/securonix/stix_translation/json/from_stix_map.json
index cd5c479ee..81ff283fb 100644
--- a/stix_shifter_modules/securonix/stix_translation/json/from_stix_map.json
+++ b/stix_shifter_modules/securonix/stix_translation/json/from_stix_map.json
@@ -1,17 +1,61 @@
{
- "x-oca-event": {
- "fields": {
- "action": ["action"],
- "description": ["description"],
- "category": ["category"],
- "severity": ["severity"],
- "created": ["eventtime"],
- "user_ref.account_login": ["username"]
+ "x-oca-event": {
+ "fields": {
+ "action": ["action"],
+ "description": ["description"],
+ "category": ["category"],
+ "severity": ["severity"],
+ "created": ["eventtime"],
+      "user_ref.account_login": ["username"],
+ "timeline_by_month": ["timeline_by_month"],
+ "rg_timezoneoffset": ["rg_timezoneoffset"],
+ "resourcegroupname": ["resourcegroupname"],
+ "eventid": ["eventid"],
+ "ipaddress": ["ipaddress"],
+ "week": ["week"],
+ "year": ["year"],
+ "accountresourcekey": ["accountresourcekey"],
+ "resourcehostname": ["resourcehostname"],
+ "sourceprocessname": ["sourceprocessname"],
+ "rg_functionality": ["rg_functionality"],
+ "userid": ["userid"],
+ "customfield2": ["customfield2"],
+ "dayofmonth": ["dayofmonth"],
+ "jobid": ["jobid"],
+ "resourcegroupid": ["resourcegroupid"],
+ "datetime": ["datetime"],
+ "timeline_by_hour": ["timeline_by_hour"],
+ "collectiontimestamp": ["collectiontimestamp"],
+ "hour": ["hour"],
+ "accountname": ["accountname"],
+ "tenantid": ["tenantid"],
+ "id": ["id"],
+ "rg_resourcetypeid": ["rg_resourcetypeid"],
+ "_indexed_at_tdt": ["_indexed_at_tdt"],
+ "timeline_by_minute": ["timeline_by_minute"],
+ "routekey": ["routekey"],
+ "collectionmethod": ["collectionmethod"],
+ "receivedtime": ["receivedtime"],
+ "publishedtime": ["publishedtime"],
+ "categorizedtime": ["categorizedtime"],
+ "jobstarttime": ["jobstarttime"],
+ "dayofyear": ["dayofyear"],
+ "minute": ["minute"],
+ "categoryseverity": ["categoryseverity"],
+ "rg_vendor": ["rg_vendor"],
+ "month": ["month"],
+ "version": ["version"],
+ "timeline": ["timeline"],
+ "dayofweek": ["dayofweek"],
+ "timeline_by_week": ["timeline_by_week"],
+ "tenantname": ["tenantname"],
+ "resourcename": ["resourcename"],
+ "ingestionnodeid": ["ingestionnodeid"]
+ }
+ },
+ "ipv4-addr": {
+ "fields": {
+ "value": ["sourceip", "destinationip"]
+ }
}
- },
- "ipv4-addr": {
- "fields": {
- "value": ["sourceip", "destinationip"]
- }
- }
}
\ No newline at end of file
diff --git a/stix_shifter_modules/securonix/stix_translation/json/to_stix_map.json b/stix_shifter_modules/securonix/stix_translation/json/to_stix_map.json
index 7b9d741c9..41dc555bd 100644
--- a/stix_shifter_modules/securonix/stix_translation/json/to_stix_map.json
+++ b/stix_shifter_modules/securonix/stix_translation/json/to_stix_map.json
@@ -1,34 +1,210 @@
{
- "action": {
- "key": "x-oca-event.action",
- "object": "x-oca-event"
- },
- "description": {
- "key": "x-oca-event.description",
- "object": "x-oca-event"
- },
- "category": {
- "key": "x-oca-event.category",
- "object": "x-oca-event"
- },
- "severity": {
- "key": "x-oca-event.severity",
- "object": "x-oca-event"
- },
- "eventtime": {
- "key": "x-oca-event.created",
- "object": "x-oca-event"
- },
- "username": {
- "key": "x-oca-event.user_ref.account_login",
- "object": "x-oca-event"
- },
- "sourceip": {
- "key": "ipv4-addr.value",
- "object": "ipv4-addr"
- },
- "destinationip": {
- "key": "ipv4-addr.value",
- "object": "ipv4-addr"
- }
+ "action": {
+ "key": "x-oca-event.action",
+ "object": "x-oca-event"
+ },
+ "description": {
+ "key": "x-oca-event.description",
+ "object": "x-oca-event"
+ },
+ "category": {
+ "key": "x-oca-event.category",
+ "object": "x-oca-event"
+ },
+ "severity": {
+ "key": "x-oca-event.severity",
+ "object": "x-oca-event"
+ },
+ "eventtime": {
+ "key": "x-oca-event.created",
+ "object": "x-oca-event"
+ },
+ "username": {
+ "key": "x-oca-event.user_ref.account_login",
+ "object": "x-oca-event"
+ },
+ "sourceip": {
+ "key": "ipv4-addr.value",
+ "object": "ipv4-addr"
+ },
+ "destinationip": {
+ "key": "ipv4-addr.value",
+ "object": "ipv4-addr"
+ },
+ "timeline_by_month": {
+ "key": "x-oca-event.timeline_by_month",
+ "object": "x-oca-event"
+ },
+ "rg_timezoneoffset": {
+ "key": "x-oca-event.rg_timezoneoffset",
+ "object": "x-oca-event"
+ },
+ "resourcegroupname": {
+ "key": "x-oca-event.resourcegroupname",
+ "object": "x-oca-event"
+ },
+ "eventid": {
+ "key": "x-oca-event.eventid",
+ "object": "x-oca-event"
+ },
+ "ipaddress": {
+ "key": "x-oca-event.ipaddress",
+ "object": "x-oca-event"
+ },
+ "week": {
+ "key": "x-oca-event.week",
+ "object": "x-oca-event"
+ },
+ "year": {
+ "key": "x-oca-event.year",
+ "object": "x-oca-event"
+ },
+ "accountresourcekey": {
+ "key": "x-oca-event.accountresourcekey",
+ "object": "x-oca-event"
+ },
+ "resourcehostname": {
+ "key": "x-oca-event.resourcehostname",
+ "object": "x-oca-event"
+ },
+ "sourceprocessname": {
+ "key": "x-oca-event.sourceprocessname",
+ "object": "x-oca-event"
+ },
+ "rg_functionality": {
+ "key": "x-oca-event.rg_functionality",
+ "object": "x-oca-event"
+ },
+ "userid": {
+ "key": "x-oca-event.userid",
+ "object": "x-oca-event"
+ },
+ "customfield2": {
+ "key": "x-oca-event.customfield2",
+ "object": "x-oca-event"
+ },
+ "dayofmonth": {
+ "key": "x-oca-event.dayofmonth",
+ "object": "x-oca-event"
+ },
+ "jobid": {
+ "key": "x-oca-event.jobid",
+ "object": "x-oca-event"
+ },
+ "resourcegroupid": {
+ "key": "x-oca-event.resourcegroupid",
+ "object": "x-oca-event"
+ },
+ "datetime": {
+ "key": "x-oca-event.datetime",
+ "object": "x-oca-event"
+ },
+ "timeline_by_hour": {
+ "key": "x-oca-event.timeline_by_hour",
+ "object": "x-oca-event"
+ },
+ "collectiontimestamp": {
+ "key": "x-oca-event.collectiontimestamp",
+ "object": "x-oca-event"
+ },
+ "hour": {
+ "key": "x-oca-event.hour",
+ "object": "x-oca-event"
+ },
+ "accountname": {
+ "key": "x-oca-event.accountname",
+ "object": "x-oca-event"
+ },
+ "tenantid": {
+ "key": "x-oca-event.tenantid",
+ "object": "x-oca-event"
+ },
+ "id": {
+ "key": "x-oca-event.id",
+ "object": "x-oca-event"
+ },
+ "rg_resourcetypeid": {
+ "key": "x-oca-event.rg_resourcetypeid",
+ "object": "x-oca-event"
+ },
+ "_indexed_at_tdt": {
+ "key": "x-oca-event._indexed_at_tdt",
+ "object": "x-oca-event"
+ },
+ "timeline_by_minute": {
+ "key": "x-oca-event.timeline_by_minute",
+ "object": "x-oca-event"
+ },
+ "routekey": {
+ "key": "x-oca-event.routekey",
+ "object": "x-oca-event"
+ },
+ "collectionmethod": {
+ "key": "x-oca-event.collectionmethod",
+ "object": "x-oca-event"
+ },
+ "receivedtime": {
+ "key": "x-oca-event.receivedtime",
+ "object": "x-oca-event"
+ },
+ "publishedtime": {
+ "key": "x-oca-event.publishedtime",
+ "object": "x-oca-event"
+ },
+ "categorizedtime": {
+ "key": "x-oca-event.categorizedtime",
+ "object": "x-oca-event"
+ },
+ "jobstarttime": {
+ "key": "x-oca-event.jobstarttime",
+ "object": "x-oca-event"
+ },
+ "dayofyear": {
+ "key": "x-oca-event.dayofyear",
+ "object": "x-oca-event"
+ },
+ "minute": {
+ "key": "x-oca-event.minute",
+ "object": "x-oca-event"
+ },
+ "categoryseverity": {
+ "key": "x-oca-event.categoryseverity",
+ "object": "x-oca-event"
+ },
+ "rg_vendor": {
+ "key": "x-oca-event.rg_vendor",
+ "object": "x-oca-event"
+ },
+ "month": {
+ "key": "x-oca-event.month",
+ "object": "x-oca-event"
+ },
+ "version": {
+ "key": "x-oca-event.version",
+ "object": "x-oca-event"
+ },
+ "timeline": {
+ "key": "x-oca-event.timeline",
+ "object": "x-oca-event"
+ },
+ "dayofweek": {
+ "key": "x-oca-event.dayofweek",
+ "object": "x-oca-event"
+ },
+ "timeline_by_week": {
+ "key": "x-oca-event.timeline_by_week",
+ "object": "x-oca-event"
+ },
+ "tenantname": {
+ "key": "x-oca-event.tenantname",
+ "object": "x-oca-event"
+ },
+ "resourcename": {
+ "key": "x-oca-event.resourcename",
+ "object": "x-oca-event"
+ },
+ "ingestionnodeid": {
+ "key": "x-oca-event.ingestionnodeid",
+ "object": "x-oca-event"
+ }
}
\ No newline at end of file
diff --git a/stix_shifter_modules/securonix/stix_translation/query_constructor.py b/stix_shifter_modules/securonix/stix_translation/query_constructor.py
index de5ea1842..d2ffacae7 100644
--- a/stix_shifter_modules/securonix/stix_translation/query_constructor.py
+++ b/stix_shifter_modules/securonix/stix_translation/query_constructor.py
@@ -1,174 +1,142 @@
-from stix_shifter_utils.stix_translation.src.patterns.pattern_objects import ObservationExpression, \
- ComparisonExpression, \
- ComparisonExpressionOperators, ComparisonComparators, Pattern, \
- CombinedComparisonExpression, CombinedObservationExpression, ObservationOperators, StartStopQualifier, SetValue
-from datetime import datetime, timedelta
+from stix_shifter_utils.stix_translation.src.patterns.pattern_objects import ObservationExpression, \
+    ComparisonExpression, \
+    ComparisonExpressionOperators, ComparisonComparators, Pattern, \
+    CombinedComparisonExpression, CombinedObservationExpression, ObservationOperators, StartStopQualifier, SetValue
+from datetime import datetime, timedelta
import time
-class SecuronixQueryStringPatternTranslator:
- QUERIES = []
+class SecuronixQueryStringPatternTranslator:
+    QUERIES = []
"""
Stix to Securonix query translation
"""
- def __init__(self, pattern: Pattern, data_model_mapper, time_range):
+    def __init__(self, pattern: Pattern, data_model_mapper, time_range):
# self.logger = logger.set_logger(__name__)
self.dmm = data_model_mapper
- self.comparator_lookup = self.dmm.map_comparator()
+        self.comparator_lookup = self.dmm.map_comparator()
self.pattern = pattern
self.time_range = time_range # filter results to last x minutes
- self.translated = self.parse_expression(pattern)
- self.queries = []
- self.queries.extend(self.translated)
+        self.translated = self.parse_expression(pattern)
+        self.queries = []
+        self.queries.extend(self.translated)
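+    # Backslash-escape characters that are special in a Spotter query string
+    # (backslashes, quotes, parentheses and spaces) so literal values survive translation.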
@staticmethod
- def _escape_value(value, comparator=None) -> str:
- if isinstance(value, str):
- return '{}'.format(
- value.replace('\\', '\\\\').replace('\"', '\\"').replace('(', '\\(').replace(')', '\\)').replace(
- ' ', '\\ '))
- else:
+    def _escape_value(value, comparator=None) -> str:
+        if isinstance(value, str):
+            return '{}'.format(
+                value.replace('\\', '\\\\').replace('\"', '\\"').replace('(', '\\(').replace(')', '\\)').replace(
+                    ' ', '\\ '))
+        else:
return value
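+    # Convert a STIX qualifier timestamp such as t'2024-01-01T11:00:00.000Z' into epoch seconds,
+    # dropping any fractional-second part before parsing.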
@staticmethod
- def _to_securonix_timestamp(ts: str) -> str:
- stripped = ts[2:-2]
- if '.' in stripped:
- stripped = stripped.split('.', 1)[0]
- return str(int(datetime.fromisoformat(stripped).timestamp()))
+    def _to_securonix_timestamp(ts: str) -> str:
+        stripped = ts[2:-2]
+        if '.' in stripped:
+            stripped = stripped.split('.', 1)[0]
+        return str(int(datetime.fromisoformat(stripped).timestamp()))
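+    # Constrain the translated expression to the eventtime window taken from a STIX START/STOP qualifier.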
- def _format_start_stop_qualifier(self, expression, qualifier: StartStopQualifier) -> str:
- start = self._to_securonix_timestamp(qualifier.start)
- stop = self._to_securonix_timestamp(qualifier.stop)
+    def _format_start_stop_qualifier(self, expression, qualifier: StartStopQualifier) -> str:
+        start = self._to_securonix_timestamp(qualifier.start)
+        stop = self._to_securonix_timestamp(qualifier.stop)
- start_stop_query = "(eventtime >= {} AND eventtime <= {})".format(start, stop)
+        start_stop_query = "(eventtime >= {} AND eventtime <= {})".format(start, stop)
- return "({}) AND {}".format(expression, start_stop_query)
+        return "({}) AND {}".format(expression, start_stop_query)
- def _parse_mapped_fields(self, value, comparator, mapped_fields_array) -> str:
+    def _parse_mapped_fields(self, value, comparator, mapped_fields_array) -> str:
"""Convert a list of mapped fields into a query string."""
comp_str = ""
- comparison_strings = []
-
- if isinstance(value, str):
- value = [value]
- if isinstance(value, int):
- value = [value]
- for val in value:
- for mapped_field in mapped_fields_array:
- comparison_strings.append(f"{mapped_field} {comparator} '{val}'")
-
- if len(comparison_strings) == 1:
- comp_str = comparison_strings[0]
- elif len(comparison_strings) > 1:
- comp_str = f"({' OR '.join(comparison_strings)})"
- else:
- raise RuntimeError((f'Failed to convert {mapped_fields_array} mapped fields into query string'))
+        comparison_strings = []
+
+        if isinstance(value, str):
+            value = [value]
+        if isinstance(value, int):
+            value = [value]
+        for val in value:
+            for mapped_field in mapped_fields_array:
+                comparison_strings.append(f"{mapped_field} {comparator} '{val}'")
+
+        if len(comparison_strings) == 1:
+            comp_str = comparison_strings[0]
+        elif len(comparison_strings) > 1:
+            comp_str = f"({' OR '.join(comparison_strings)})"
+        else:
+            raise RuntimeError(f'Failed to convert {mapped_fields_array} mapped fields into query string')
return comp_str
- def _parse_expression(self, expression, qualifier=None):
- if isinstance(expression, ComparisonExpression):
+    def _parse_expression(self, expression, qualifier=None):
+        if isinstance(expression, ComparisonExpression):
# Base Case
# Resolve STIX Object Path to a field in the target Data Model
- stix_object, stix_field = expression.object_path.split(':')
+            stix_object, stix_field = expression.object_path.split(':')
- mapped_fields_array = self.dmm.map_field(stix_object, stix_field)
+            mapped_fields_array = self.dmm.map_field(stix_object, stix_field)
query_string = ""
- comparator = self.comparator_lookup[str(expression.comparator)]
- if expression.negated and expression.comparator == ComparisonComparators.Equal:
- comparator = self._get_negate_comparator()
- value = self._escape_value(expression.value)
- elif expression.comparator == ComparisonComparators.NotEqual and not expression.negated:
- comparator = self._get_negate_comparator()
- value = self._escape_value(expression.value)
+            comparator = self.comparator_lookup[str(expression.comparator)]
+            if expression.negated and expression.comparator == ComparisonComparators.Equal:
+                comparator = self._get_negate_comparator()
+                value = self._escape_value(expression.value)
+            elif expression.comparator == ComparisonComparators.NotEqual and not expression.negated:
+                comparator = self._get_negate_comparator()
+                value = self._escape_value(expression.value)
elif (expression.comparator == ComparisonComparators.In and
- isinstance(expression.value, SetValue)):
- value = list(map(self._escape_value, expression.value.element_iterator()))
- else:
- value = self._escape_value(expression.value)
-
- query_string = self._parse_mapped_fields(
- value=value,
- comparator=comparator,
- mapped_fields_array=mapped_fields_array
+                  isinstance(expression.value, SetValue)):
+                value = list(map(self._escape_value, expression.value.element_iterator()))
+            else:
+                value = self._escape_value(expression.value)
+
+            query_string = self._parse_mapped_fields(
+                value=value,
+                comparator=comparator,
+                mapped_fields_array=mapped_fields_array
)
- if qualifier is not None:
- if isinstance(qualifier, StartStopQualifier):
- query_string = self._format_start_stop_qualifier(query_string, qualifier)
- else:
- raise RuntimeError("Unknown Qualifier: {}".format(qualifier))
+            if qualifier is not None:
+                if isinstance(qualifier, StartStopQualifier):
+                    query_string = self._format_start_stop_qualifier(query_string, qualifier)
+                else:
+                    raise RuntimeError("Unknown Qualifier: {}".format(qualifier))
- return '({})'.format(query_string)
+            return '({})'.format(query_string)
- elif isinstance(expression, CombinedComparisonExpression):
+        elif isinstance(expression, CombinedComparisonExpression):
# Wrap nested combined comparison expressions in parentheses
- f1 = "({})" if isinstance(expression.expr2, CombinedComparisonExpression) else "{}"
- f2 = "({})" if isinstance(expression.expr1, CombinedComparisonExpression) else "{}"
-
- query_string = (f1 + " {} " + f2).format(self._parse_expression(expression.expr2),
- self.comparator_lookup[str(expression.operator)],
- self._parse_expression(expression.expr1))
- if qualifier is not None:
- if isinstance(qualifier, StartStopQualifier):
- return self._format_start_stop_qualifier(query_string, qualifier)
- else:
- raise RuntimeError("Unknown Qualifier: {}".format(qualifier))
- else:
- return "{}".format(query_string)
- elif isinstance(expression, ObservationExpression):
- query_string = self._parse_expression(expression.comparison_expression, qualifier=qualifier)
+            f1 = "({})" if isinstance(expression.expr2, CombinedComparisonExpression) else "{}"
+            f2 = "({})" if isinstance(expression.expr1, CombinedComparisonExpression) else "{}"
+
+            query_string = (f1 + " {} " + f2).format(self._parse_expression(expression.expr2),
+                                                     self.comparator_lookup[str(expression.operator)],
+                                                     self._parse_expression(expression.expr1))
+            if qualifier is not None:
+                if isinstance(qualifier, StartStopQualifier):
+                    return self._format_start_stop_qualifier(query_string, qualifier)
+                else:
+                    raise RuntimeError("Unknown Qualifier: {}".format(qualifier))
+            else:
+                return "{}".format(query_string)
+        elif isinstance(expression, ObservationExpression):
+            query_string = self._parse_expression(expression.comparison_expression, qualifier=qualifier)
return query_string
- elif isinstance(expression, CombinedObservationExpression):
- expr1 = self._parse_expression(expression.expr1, qualifier=qualifier)
- expr2 = self._parse_expression(expression.expr2, qualifier=qualifier)
- if (not isinstance(expr1, list)):
- SecuronixQueryStringPatternTranslator.QUERIES.extend([expr1])
- if (not isinstance(expr2, list)):
- SecuronixQueryStringPatternTranslator.QUERIES.extend([expr2])
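+        # Each observation expression is translated into its own Spotter query; the class-level
+        # QUERIES list accumulates them across a combined observation expression.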
+        elif isinstance(expression, CombinedObservationExpression):
+            expr1 = self._parse_expression(expression.expr1, qualifier=qualifier)
+            expr2 = self._parse_expression(expression.expr2, qualifier=qualifier)
+            if not isinstance(expr1, list):
+                SecuronixQueryStringPatternTranslator.QUERIES.extend([expr1])
+            if not isinstance(expr2, list):
+                SecuronixQueryStringPatternTranslator.QUERIES.extend([expr2])
return SecuronixQueryStringPatternTranslator.QUERIES
- elif isinstance(expression, Pattern):
- return self._parse_expression(expression.expression)
- elif hasattr(expression, 'qualifier') and hasattr(expression, 'observation_expression'):
- return self._parse_expression(expression.observation_expression, expression)
- else:
- raise RuntimeError("Unknown Recursion Case for expression={}, type(expression)={}".format(
- expression, type(expression)))
-
- def _get_negate_comparator(self):
- return self.comparator_lookup["ComparisonComparators.NotEqual"]
-
- def _add_default_timerange(self, query):
- if self.time_range and 'eventtime' not in query:
- d = (datetime.today() - timedelta(hours=0, minutes=self.time_range)).timestamp()
- n_query = "(({}) AND eventtime >= {})".format(query, int(d))
- return n_query
-
- return query
-
- def _add_default_timerange_to_queries(self, queries):
- n_queries = list()
- if not isinstance(queries, list):
- queries = [queries]
- for q in queries:
- n_queries.append(self._add_default_timerange(q))
-
- return n_queries
-
- def parse_expression(self, pattern: Pattern):
- queries = self._parse_expression(pattern)
- return self._add_default_timerange_to_queries(queries)
-
-
-def translate_pattern(pattern: Pattern, data_model_mapping, options):
- time_range = options['time_range']
-
- translated_statements_lst = SecuronixQueryStringPatternTranslator(pattern, data_model_mapping, time_range)
-
- now = int(time.time())
- past_24_hours = now - 86400
- translated_statements = f"index=activity AND tenantname=\"All Tenants\"" + " ".join(
- translated_statements_lst.queries)
- return translated_statements
\ No newline at end of file
+        elif isinstance(expression, Pattern):
+            return self._parse_expression(expression.expression)
+        elif hasattr(expression, 'qualifier') and hasattr(expression, 'observation_expression'):
+            return self._parse_expression(expression.observation_expression, expression)
+        else:
+            raise RuntimeError("Unknown Recursion Case for expression={}, type(expression)={}".format(
+                expression, type(expression)))
+
+    def _get_negate_comparator(self):
+        return self.comparator_lookup["ComparisonComparators.NotEqual"]
+
+    def _add_default_timerange(self, query):
+        if self.time_range and 'eventtime' not in query:
+            d = (datetime.today() - timedelta(hours=0, minutes=self.time_range)).timestamp()
+            n_query = "(({}) AND eventtime >= {})".format(query, int(d))
+            return n_query
+
+        return query
+
+    def _add_default_timerange_to_queries(self, queries):
+        n_queries = list()
+        if not isinstance(queries, list):
+            queries = [queries]
+        for q in queries:
+            n_queries.append(self._add_default_timerange(q))
+
+        return n_queries
+
+    def parse_expression(self, pattern: Pattern):
+        queries = self._parse_expression(pattern)
+        return self._add_default_timerange_to_queries(queries)
+
+
+def translate_pattern(pattern: Pattern, data_model_mapping, options):
+    time_range = options['time_range']
+
+    translated_statements_lst = SecuronixQueryStringPatternTranslator(pattern, data_model_mapping, time_range)
+
+    now = int(time.time())
+    past_24_hours = now - 86400
+    translated_statements = f"index=activity AND tenantname=\"All Tenants\"" + " ".join(
+        translated_statements_lst.queries)
+    return translated_statements
\ No newline at end of file
diff --git a/stix_shifter_modules/securonix/stix_transmission/api_client.py b/stix_shifter_modules/securonix/stix_transmission/api_client.py
index d7b361563..75c44651b 100644
--- a/stix_shifter_modules/securonix/stix_transmission/api_client.py
+++ b/stix_shifter_modules/securonix/stix_transmission/api_client.py
@@ -2,12 +2,13 @@
import requests
from urllib.parse import urlencode
from stix_shifter_utils.stix_transmission.utils.RestApiClientAsync import RestApiClientAsync
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta
from stix_shifter_utils.utils import logger
+import time
-class APIResponseException(Exception):
- def __init__(self, error_code, error_message, content_header_type, response):
+class APIResponseException(Exception):
+    def __init__(self, error_code, error_message, content_header_type, response):
self.error_code = error_code
self.error_message = error_message
self.content_header_type = content_header_type
@@ -16,109 +17,114 @@ def __init__(self, error_code, error_message, content_header_type, response):
pass
-class APIClient:
+class APIClient:
TOKEN_ENDPOINT = '/Snypr/ws/token/generate'
SEARCH_ENDPOINT = '/Snypr/ws/spotter/index/search'
- logger = logger.set_logger(__name__)
+    logger = logger.set_logger(__name__)
"""API Client to handle all calls."""
- def __init__(self, connection, configuration):
+    def __init__(self, connection, configuration):
"""Initialization.
:param connection: dict, connection dict
:param configuration: dict,config dict"""
- headers = dict()
- self.client = RestApiClientAsync(connection.get('host'), None, headers)
+        headers = dict()
+        self.client = RestApiClientAsync(connection.get('host'), None, headers)
- self.timeout = connection['options'].get('timeout')
+        self.timeout = connection['options'].get('timeout')
- self.headers = dict()
- self.headers['Content-Type'] = 'application/json'
- self.headers['Accept'] = '*/*'
- self.headers['user-agent'] = 'oca_stixshifter_1.0'
+        self.headers = dict()
+        self.headers['Content-Type'] = 'application/json'
+        self.headers['Accept'] = '*/*'
+        self.headers['user-agent'] = 'oca_stixshifter_1.0'
- self.auth_headers = dict()
- self.auth_headers['Content-Type'] = 'application/json'
- self.auth_headers['user-agent'] = 'oca_stixshifter_1.0'
+        self.auth_headers = dict()
+        self.auth_headers['Content-Type'] = 'application/json'
+        self.auth_headers['user-agent'] = 'oca_stixshifter_1.0'
- auth = configuration.get('auth')
- self.username = auth["username"]
- self.password = auth["password"]
- self._token_time = datetime.now() - timedelta(days=7)
- self.base_url = connection.get('host')
+        auth = configuration.get('auth')
+        self.username = auth["username"]
+        self.password = auth["password"]
+        self._token_time = datetime.now() - timedelta(days=7)
+        self.base_url = connection.get('host')
- async def ping_box(self):
- token = await self.get_token()
+    async def ping_box(self):
+        token = await self.get_token()
headers = self.headers
- headers['token'] = token
+        headers['token'] = token
params = {
- "query": "index=activity"
+            "query": "index=activity"
}
- try:
- response = requests.get(f"{self.base_url}{self.SEARCH_ENDPOINT}", headers=headers, params=params,
- timeout=self.timeout, verify=False)
-
- response_obj = type('response_obj', (), {})()
+        try:
+            response = requests.get(f"{self.base_url}{self.SEARCH_ENDPOINT}", headers=headers, params=params,
+                                    timeout=self.timeout, verify=False)
+            response_obj = type('response_obj', (), {})()
response_obj.code = response.status_code
response_obj.content = response.content
response_obj.headers = response.headers
return response_obj
- except Exception as e:
- self.logger.error(f"Error during ping box: {e}")
+        except Exception as e:
+            self.logger.error(f"Error during ping box: {e}")
raise e
- async def get_securonix_data(self, query):
- token = await self.get_token()
+    async def get_securonix_data(self, query, queryId=None, count_only=False):
+        token = await self.get_token()
headers = self.headers
- headers['token'] = token
-
- now = int(datetime.now().timestamp())
- past_24_hours = now - 86400
+        headers['token'] = token
+        now = datetime.now()
+        past_24_hours = now - timedelta(hours=24)
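+        # eventtime_from/eventtime_to are sent as "MM/DD/YYYY HH:MM:SS" strings covering the
+        # trailing 24 hours, which is the window this connector queries by default.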
params = {
- "query": query,
- "eventtime_from": past_24_hours,
- "eventtime_to": now
+            "query": query,
+            "eventtime_from": past_24_hours.strftime("%m/%d/%Y %H:%M:%S"),
+            "eventtime_to": now.strftime("%m/%d/%Y %H:%M:%S")
}
- try:
- response = requests.get(f"{self.base_url}{self.SEARCH_ENDPOINT}", headers=headers, params=params,
- timeout=self.timeout, verify=False)
- response_obj = type('response_obj', (), {})()
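+        # A queryId returned by an earlier Spotter search is passed back to page through the
+        # remaining results; count_only asks the API for just a result count.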
+        if queryId is not None:
+            params["queryId"] = queryId
+
+        if count_only:
+            params["count_only"] = "true"
+
+        try:
+            response = requests.get(f"{self.base_url}{self.SEARCH_ENDPOINT}", headers=headers, params=params,
+                                    timeout=self.timeout, verify=False)
+
+            response_obj = type('response_obj', (), {})()
response_obj.code = response.status_code
response_obj.content = response.content
response_obj.headers = response.headers
return response_obj
- except Exception as e:
- self.logger.error(f"Error getting securonix data: {e}")
+        except Exception as e:
+            self.logger.error(f"Error getting securonix data: {e}")
raise e
- async def get_token(self) -> str:
- self.logger.debug(f"Checking if the current token has expired. Token Creation time was {self._token_time}")
- if (datetime.now() - self._token_time) >= timedelta(minutes=30):
- self.logger.debug(f"Attempting to get a new authenctication token")
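+    # Tokens are cached and only refreshed once they are more than 30 minutes old, even though
+    # the token itself is requested with a 365-day validity.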
+    async def get_token(self) -> str:
+        self.logger.debug(f"Checking if the current token has expired. Token Creation time was {self._token_time}")
+        if (datetime.now() - self._token_time) >= timedelta(minutes=30):
+            self.logger.debug("Attempting to get a new authentication token")
- self.auth_headers['username'] = self.username
- self.auth_headers['password'] = self.password
- self.auth_headers['validity'] = '365'
+            self.auth_headers['username'] = self.username
+            self.auth_headers['password'] = self.password
+            self.auth_headers['validity'] = '365'
- try:
- response = requests.get(f"{self.base_url}{self.TOKEN_ENDPOINT}", headers=self.auth_headers,
- timeout=self.timeout, verify=False)
+            try:
+                response = requests.get(f"{self.base_url}{self.TOKEN_ENDPOINT}", headers=self.auth_headers,
+                                        timeout=self.timeout, verify=False)
# A successful response can be 200 or 201.
- if response.status_code >= 200 and response.status_code < 300:
+                if response.status_code >= 200 and response.status_code < 300:
- self.logger.debug(f"Get authentication token was successful.")
+                    self.logger.debug("Get authentication token was successful.")
token_text = response.text
- token = token_text.strip('"')
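+                    # The token endpoint returns the authentication token as plain text; strip any surrounding quotes.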
+                    token = token_text.strip('"')
self._token = token
- self._token_time = datetime.now()
+                    self._token_time = datetime.now()
return token
- else:
- self.logger.debug(f"Get authentication token was not successful.")
- raise APIResponseException(response.status_code, response.text,
- response.headers.get('Content-Type'), response)
- except Exception as e:
- self.logger.error(f"Error getting token: {e}")
+                else:
+                    self.logger.debug("Get authentication token was not successful.")
+                    raise APIResponseException(response.status_code, response.text,
+                                               response.headers.get('Content-Type'), response)
+            except Exception as e:
+                self.logger.error(f"Error getting token: {e}")
raise e
return self._token
\ No newline at end of file
diff --git a/stix_shifter_modules/securonix/stix_transmission/connector.py b/stix_shifter_modules/securonix/stix_transmission/connector.py
index fe04eb478..5056f1091 100644
--- a/stix_shifter_modules/securonix/stix_transmission/connector.py
+++ b/stix_shifter_modules/securonix/stix_transmission/connector.py
@@ -5,148 +5,158 @@
from stix_shifter_utils.utils import logger
from stix_shifter_modules.securonix.stix_transmission.api_client import APIResponseException
from requests.exceptions import ConnectionError
-from datetime import datetime, timezone
+from datetime import datetime, timezone
-class Connector(BaseJsonSyncConnector):
+class Connector(BaseJsonSyncConnector):
init_error = None
- logger = logger.set_logger(__name__)
+    logger = logger.set_logger(__name__)
PROVIDER = 'Securonix'
- def __init__(self, connection, configuration):
- self.connector = __name__.split('.')[1]
- self.api_client = APIClient(connection, configuration)
- self.result_limit = connection['options'].get('result_limit', 1000)
- self.status = dict()
+    def __init__(self, connection, configuration):
+        self.connector = __name__.split('.')[1]
+        self.api_client = APIClient(connection, configuration)
+        self.result_limit = connection['options'].get('result_limit', 1000)
+        self.status = dict()
- async def ping_connection(self):
+    async def ping_connection(self):
return_obj = {}
- try:
- self.logger.debug(f"Attempting to ping the service for an auth token")
- response = await self.api_client.ping_box()
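+        # Connectivity is validated by requesting a token and running a minimal "index=activity" search.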
+        try:
+            self.logger.debug("Attempting to ping the service for an auth token")
+            response = await self.api_client.ping_box()
response_code = response.code
- response_msg = response.read().decode('utf-8')
- if response_code == 200:
- self.logger.debug(f"Successfully pinged the device for an auth token")
- return_obj['success'] = True
- else:
- response_type = response.headers.get('Content-Type')
- raise APIResponseException(response_code, response_msg, response_type, response)
- except Exception as e:
- return self._handle_Exception(e)
+            response_msg = response.read().decode('utf-8')
+            if response_code == 200:
+                self.logger.debug("Successfully pinged the device for an auth token")
+                return_obj['success'] = True
+            else:
+                response_type = response.headers.get('Content-Type')
+                raise APIResponseException(response_code, response_msg, response_type, response)
+        except Exception as e:
+            return self._handle_Exception(e)
return return_obj
- async def create_results_connection(self, query, offset, length, metadata=None):
+    async def create_results_connection(self, query, offset, length, metadata=None):
# Initialize the starting offset and initial variables to empty.
- return_obj = dict()
- length = int(length)
- offset = int(offset)
+        return_obj = dict()
+        length = int(length)
+        offset = int(offset)
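+        # metadata carries the offset, result_count and Spotter queryId between calls so that
+        # follow-up requests can continue the same search instead of starting a new one.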
- if (metadata == None):
- metadata = dict()
+        if metadata is None:
+            metadata = dict()
current_offset = offset
- metadata["result_count"] = 0
- else:
- current_offset = metadata["offset"]
+            metadata["result_count"] = 0
+            metadata["offset"] = current_offset
+            queryId = None
+        else:
+            current_offset = metadata["offset"]
+            queryId = metadata.get('queryId')
- try:
+        try:
# Query the alert endpoint to get a list of ID's. The ID's are in a list of list format.
- securonix_data = await self._get_securonix_data(length, query, current_offset, metadata)
- except Exception as e:
- return self._handle_Exception(e)
+            securonix_data = await self._get_securonix_data(length, query, current_offset, metadata, queryId)
+        except Exception as e:
+            return self._handle_Exception(e)
# If the total count is greater than the result limit,
- if (metadata["offset"] > self.result_limit):
- securonix_data = securonix_data[0:(self.result_limit - offset) - 1]
+        if metadata["offset"] > self.result_limit:
+            securonix_data = securonix_data[0:(self.result_limit - offset) - 1]
- if (metadata["result_count"] >= self.result_limit) or len(securonix_data) < length:
+        if metadata["result_count"] >= self.result_limit or len(securonix_data) < length:
metadata = None
- return_obj["success"] = True
- return_obj["metadata"] = metadata
- return_obj["data"] = securonix_data
+        return_obj["success"] = True
+        return_obj["metadata"] = metadata
+        return_obj["data"] = securonix_data
return return_obj
- async def _get_securonix_data(self, length, query, current_offset, metadata):
- securonix_data = []
- self.logger.debug(f"Collecting results using the following query/filter : {query}")
-
- # Get the next batch of ID's to process. We use length as batch size as this only gets the ID, not the data.
- # 10000 is the maximum amount that can be asked for at once.
- self.logger.debug(
- f"Using the following settings to get a batch of ID's: offset : {current_offset}, length : {length}")
-
- get_data_response = await self.api_client.get_securonix_data(query)
- if get_data_response.code == 200:
- self.logger.debug(f"Successfully got a list of results")
-
- get_data_response_data = get_data_response.read().decode('utf-8')
- self.logger.debug(f"Raw Response from API : {get_data_response_data}")
-
- try:
- get_data_response_json = json.loads(get_data_response_data)
-
- if "results" in get_data_response_json:
- securonix_data = get_data_response_json.get('results')
- metadata["result_count"] = metadata["result_count"] + len(securonix_data)
- else:
- self.logger.warning(
- f"Response did not contain 'results' key. Assuming empty result. Full response: {get_data_response_json}")
-
-
- except json.JSONDecodeError as e:
- self.logger.error(f"Failed to decode JSON: {e}. Full response: {get_data_response_data}")
- raise e
-
- if (len(securonix_data) < length or len(securonix_data) == 0):
- current_offset = current_offset + len(securonix_data)
- else:
- current_offset = current_offset + length
- else:
- raise APIResponseException(get_data_response.code, get_data_response.content,
- get_data_response.headers.get('Content-Type'), get_data_response)
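+    # Repeatedly call the Spotter search endpoint, feeding back the queryId from each response,
+    # until at least `length` records are collected or the data source stops returning results.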
+    async def _get_securonix_data(self, length, query, current_offset, metadata, queryId):
+        securonix_data = []
+        self.logger.debug(f"Collecting results using the following query/filter : {query}")
+
+        while len(securonix_data) < length:
+            # Get the next batch of results.
+            self.logger.debug(
+                f"Using the following settings to get a batch of results: offset : {current_offset}, length : {length}")
+            get_data_response = await self.api_client.get_securonix_data(query, queryId)
+            if get_data_response.code == 200:
+                self.logger.debug("Successfully got a list of results")
+                get_data_response_data = get_data_response.read().decode('utf-8')
+                self.logger.debug(f"Raw Response from API : {get_data_response_data}")
+                try:
+                    get_data_response_json = json.loads(get_data_response_data)
+                    if "results" in get_data_response_json:
+                        securonix_data.extend(get_data_response_json.get('results'))
+                        metadata["result_count"] = metadata["result_count"] + len(
+                            get_data_response_json.get('results'))
+                        queryId = get_data_response_json.get('queryId')
+
+                        if queryId is not None:
+                            metadata['queryId'] = queryId
+                        else:
+                            metadata = None
+                    else:
+                        self.logger.warning(
+                            f"Response did not contain 'results' key. Assuming empty result. Full response: {get_data_response_json}")
+                        metadata = None
+                        break
+                except json.JSONDecodeError as e:
+                    self.logger.error(f"Failed to decode JSON: {e}. Full response: {get_data_response_data}")
+                    raise e
+                if len(securonix_data) == 0:
+                    metadata = None
+                    break
+                if len(securonix_data) >= length:
+                    break
+            else:
+                raise APIResponseException(get_data_response.code, get_data_response.content,
+                                           get_data_response.headers.get('Content-Type'), get_data_response)
+            current_offset = current_offset + length
# We now know the next meta_data offset
- metadata["offset"] = current_offset
+        if metadata is not None:
+            metadata["offset"] = current_offset
return securonix_data
- def _handle_Exception(self, exception):
+    def _handle_Exception(self, exception):
response_dict = {}
return_obj = {}
- try:
+        try:
raise exception
- except APIResponseException as ex:
- return self._handle_api_response(ex)
- except Exception as ex:
- ErrorResponder.fill_error(return_obj, response_dict, error=ex, connector=self.connector)
+        except APIResponseException as ex:
+            return self._handle_api_response(ex)
+        except Exception as ex:
+            ErrorResponder.fill_error(return_obj, response_dict, error=ex, connector=self.connector)
return return_obj
- def _handle_api_response(self, rest_api_exception):
+    def _handle_api_response(self, rest_api_exception):
response_dict = {}
return_obj = {}
connection_error = None
- if (rest_api_exception.content_header_type == 'application/json'):
- response = json.loads(rest_api_exception.error_message)
- if 'error' in response:
- response_dict['message'] = response['error']
- elif 'message' in response:
- response_dict['message'] = response['message']
- else:
- response_dict['message'] = str(rest_api_exception.error_message)
- if (rest_api_exception.error_code == 400):
- response_dict['type'] = 'ValidationError'
- elif (rest_api_exception.error_code == 401):
- response_dict['type'] = 'AuthenticationError'
- elif (rest_api_exception.error_code == 403):
- response_dict['type'] = 'TokenError'
- elif (rest_api_exception.content_header_type == 'text/html'):
+        if rest_api_exception.content_header_type == 'application/json':
+            response = json.loads(rest_api_exception.error_message)
+            if 'error' in response:
+                response_dict['message'] = response['error']
+            elif 'message' in response:
+                response_dict['message'] = response['message']
+            else:
+                response_dict['message'] = str(rest_api_exception.error_message)
+            if rest_api_exception.error_code == 400:
+                response_dict['type'] = 'ValidationError'
+            elif rest_api_exception.error_code == 401:
+                response_dict['type'] = 'AuthenticationError'
+            elif rest_api_exception.error_code == 403:
+                response_dict['type'] = 'TokenError'
+        elif rest_api_exception.content_header_type == 'text/html':
response = rest_api_exception.error_message
- connection_error = ConnectionError(f'Error connecting the datasource: {rest_api_exception.error_message}')
- else:
- raise Exception(rest_api_exception.error_message)
+            connection_error = ConnectionError(
+                f'Error connecting the datasource: {rest_api_exception.error_message}')
+        else:
+            raise Exception(rest_api_exception.error_message)
- ErrorResponder.fill_error(return_obj, response_dict, ['message'], error=connection_error,
- connector=self.connector)
+        ErrorResponder.fill_error(return_obj, response_dict, ['message'], error=connection_error,
+                                  connector=self.connector)
return return_obj
\ No newline at end of file
diff --git a/stix_shifter_modules/securonix/supported_stix.md b/stix_shifter_modules/securonix/supported_stix.md
index 28b1f6804..e8b4d1545 100644
--- a/stix_shifter_modules/securonix/supported_stix.md
+++ b/stix_shifter_modules/securonix/supported_stix.md
@@ -9,38 +9,126 @@
### Supported STIX Operators
-| STIX Operator | Data Source Operator |
-|--|--|
-| AND (Comparison) | AND |
-| OR (Comparison) | OR |
-| = | = |
-| != | != |
-| > | > |
-| >= | >= |
-| < | < |
-| <= | <= |
-| IN | IN |
+| STIX Operator | Data Source Operator |
+|------------------|----------------------|
+| AND (Comparison) | AND |
+| OR (Comparison) | OR |
+| = | = |
+| != | != |
+| > | > |
+| >= | >= |
+| < | < |
+| <= | <= |
+| IN | IN |
### Searchable STIX objects and properties
-| STIX Object and Property | Mapped Data Source Fields |
-|---|---|
-| **x-oca-event**:action | action |
-| **x-oca-event**:description | description |
-| **x-oca-event**:category | category |
-| **x-oca-event**:severity | severity |
-| **x-oca-event**:created | eventtime |
-| **x-oca-event**:user_ref.account_login | username |
-| **ipv4-addr**:value | sourceip, destinationip |
+| STIX Object and Property | Mapped Data Source Fields |
+|----------------------------------------|---------------------------|
+| **x-oca-event**:action | action |
+| **x-oca-event**:description | description |
+| **x-oca-event**:category | category |
+| **x-oca-event**:severity | severity |
+| **x-oca-event**:created | eventtime |
+| **x-oca-event**:user_ref.account_login | username |
+| **ipv4-addr**:value | sourceip, destinationip |
+| **x-oca-event**:timeline_by_month | timeline_by_month |
+| **x-oca-event**:rg_timezoneoffset | rg_timezoneoffset |
+| **x-oca-event**:resourcegroupname | resourcegroupname |
+| **x-oca-event**:eventid | eventid |
+| **x-oca-event**:ipaddress | ipaddress |
+| **x-oca-event**:week | week |
+| **x-oca-event**:year | year |
+| **x-oca-event**:accountresourcekey | accountresourcekey |
+| **x-oca-event**:resourcehostname | resourcehostname |
+| **x-oca-event**:sourceprocessname | sourceprocessname |
+| **x-oca-event**:rg_functionality | rg_functionality |
+| **x-oca-event**:userid | userid |
+| **x-oca-event**:customfield2 | customfield2 |
+| **x-oca-event**:dayofmonth | dayofmonth |
+| **x-oca-event**:jobid | jobid |
+| **x-oca-event**:resourcegroupid | resourcegroupid |
+| **x-oca-event**:datetime | datetime |
+| **x-oca-event**:timeline_by_hour | timeline_by_hour |
+| **x-oca-event**:collectiontimestamp | collectiontimestamp |
+| **x-oca-event**:hour | hour |
+| **x-oca-event**:accountname | accountname |
+| **x-oca-event**:tenantid | tenantid |
+| **x-oca-event**:id | id |
+| **x-oca-event**:rg_resourcetypeid | rg_resourcetypeid |
+| **x-oca-event**:_indexed_at_tdt | _indexed_at_tdt |
+| **x-oca-event**:timeline_by_minute | timeline_by_minute |
+| **x-oca-event**:routekey | routekey |
+| **x-oca-event**:collectionmethod | collectionmethod |
+| **x-oca-event**:receivedtime | receivedtime |
+| **x-oca-event**:publishedtime | publishedtime |
+| **x-oca-event**:categorizedtime | categorizedtime |
+| **x-oca-event**:jobstarttime | jobstarttime |
+| **x-oca-event**:dayofyear | dayofyear |
+| **x-oca-event**:minute | minute |
+| **x-oca-event**:categoryseverity | categoryseverity |
+| **x-oca-event**:rg_vendor | rg_vendor |
+| **x-oca-event**:month | month |
+| **x-oca-event**:version | version |
+| **x-oca-event**:timeline | timeline |
+| **x-oca-event**:dayofweek | dayofweek |
+| **x-oca-event**:timeline_by_week | timeline_by_week |
+| **x-oca-event**:tenantname | tenantname |
+| **x-oca-event**:resourcename | resourcename |
+| **x-oca-event**:ingestionnodeid | ingestionnodeid |
### Supported STIX Objects and Properties for Query Results
-| STIX Object | STIX Property | Data Source Field |
-|--|--|--|
-| x-oca-event | action | action |
-| x-oca-event | description | description |
-| x-oca-event | category | category |
-| x-oca-event | severity | severity |
-| x-oca-event | created | eventtime |
-| x-oca-event | user_ref | username |
-| ipv4-addr | value | sourceip |
-| ipv4-addr | value | destinationip |
\ No newline at end of file
+| STIX Object | STIX Property | Data Source Field |
+|-------------|---------------------|---------------------|
+| x-oca-event | action | action |
+| x-oca-event | description | description |
+| x-oca-event | category | category |
+| x-oca-event | severity | severity |
+| x-oca-event | created | eventtime |
+| x-oca-event | user_ref | username |
+| ipv4-addr | value | sourceip |
+| ipv4-addr | value | destinationip |
+| x-oca-event | timeline_by_month | timeline_by_month |
+| x-oca-event | rg_timezoneoffset | rg_timezoneoffset |
+| x-oca-event | resourcegroupname | resourcegroupname |
+| x-oca-event | eventid | eventid |
+| x-oca-event | ipaddress | ipaddress |
+| x-oca-event | week | week |
+| x-oca-event | year | year |
+| x-oca-event | accountresourcekey | accountresourcekey |
+| x-oca-event | resourcehostname | resourcehostname |
+| x-oca-event | sourceprocessname | sourceprocessname |
+| x-oca-event | rg_functionality | rg_functionality |
+| x-oca-event | userid | userid |
+| x-oca-event | customfield2 | customfield2 |
+| x-oca-event | dayofmonth | dayofmonth |
+| x-oca-event | jobid | jobid |
+| x-oca-event | resourcegroupid | resourcegroupid |
+| x-oca-event | datetime | datetime |
+| x-oca-event | timeline_by_hour | timeline_by_hour |
+| x-oca-event | collectiontimestamp | collectiontimestamp |
+| x-oca-event | hour | hour |
+| x-oca-event | accountname | accountname |
+| x-oca-event | tenantid | tenantid |
+| x-oca-event | id | id |
+| x-oca-event | rg_resourcetypeid | rg_resourcetypeid |
+| x-oca-event | _indexed_at_tdt | _indexed_at_tdt |
+| x-oca-event | timeline_by_minute | timeline_by_minute |
+| x-oca-event | routekey | routekey |
+| x-oca-event | collectionmethod | collectionmethod |
+| x-oca-event | receivedtime | receivedtime |
+| x-oca-event | publishedtime | publishedtime |
+| x-oca-event | categorizedtime | categorizedtime |
+| x-oca-event | jobstarttime | jobstarttime |
+| x-oca-event | dayofyear | dayofyear |
+| x-oca-event | minute | minute |
+| x-oca-event | categoryseverity | categoryseverity |
+| x-oca-event | rg_vendor | rg_vendor |
+| x-oca-event | month | month |
+| x-oca-event | version | version |
+| x-oca-event | timeline | timeline |
+| x-oca-event | dayofweek | dayofweek |
+| x-oca-event | timeline_by_week | timeline_by_week |
+| x-oca-event | tenantname | tenantname |
+| x-oca-event | resourcename | resourcename |
+| x-oca-event | ingestionnodeid | ingestionnodeid |
\ No newline at end of file