From c34e6222545c8acebbf755850f296a2d8d0b1a05 Mon Sep 17 00:00:00 2001 From: Oleksandr Volha Date: Wed, 15 Nov 2023 19:05:55 +0200 Subject: [PATCH] parser, tokenizer, render fixes --- .../backends/athena/renders/athena.py | 2 ++ .../converter/backends/athena/tokenizer.py | 8 +++---- .../converter/backends/chronicle/tokenizer.py | 14 +++++------ .../elasticsearch/renders/elasticsearch.py | 2 ++ .../backends/elasticsearch/tokenizer.py | 8 +++---- .../converter/backends/logscale/tokenizer.py | 8 +++---- .../microsoft/renders/microsoft_sentinel.py | 24 ++++++++++--------- .../converter/backends/microsoft/tokenizer.py | 10 ++++---- .../backends/opensearch/renders/opensearch.py | 2 ++ .../backends/opensearch/tokenizer.py | 8 +++---- .../app/converter/backends/qradar/const.py | 2 +- .../backends/qradar/renders/qradar.py | 1 - .../converter/backends/qradar/tokenizer.py | 6 ++--- .../backends/splunk/renders/splunk.py | 4 ---- .../backends/splunk/renders/splunk_alert.py | 2 +- .../converter/backends/splunk/tokenizer.py | 16 ++++++------- .../app/converter/core/models/field.py | 2 +- siem-converter/app/converter/core/render.py | 10 ++++---- .../app/converter/tools/decorators.py | 5 +++- 19 files changed, 71 insertions(+), 63 deletions(-) diff --git a/siem-converter/app/converter/backends/athena/renders/athena.py b/siem-converter/app/converter/backends/athena/renders/athena.py index c584d1e0..44ebba13 100644 --- a/siem-converter/app/converter/backends/athena/renders/athena.py +++ b/siem-converter/app/converter/backends/athena/renders/athena.py @@ -67,6 +67,8 @@ class AthenaQueryRender(BaseQueryRender): field_value_map = AthenaFieldValue(or_token=or_token) query_pattern = "{prefix} WHERE {query} {functions}" + comment_symbol = "--" + is_multi_line_comment = True def generate_prefix(self, log_source_signature: LogSourceSignature) -> str: table = str(log_source_signature) if str(log_source_signature) else "eventlog" diff --git 
a/siem-converter/app/converter/backends/athena/tokenizer.py b/siem-converter/app/converter/backends/athena/tokenizer.py index eb1abe35..4ce65d6d 100644 --- a/siem-converter/app/converter/backends/athena/tokenizer.py +++ b/siem-converter/app/converter/backends/athena/tokenizer.py @@ -30,7 +30,7 @@ class AthenaTokenizer(QueryTokenizer): match_operator_pattern = r"""(?:___field___\s?(?Plike|in|=|>|<|>=|<=|<>|!=))\s?""" num_value_pattern = r"(?P\d+(?:\.\d+)*)\s*" bool_value_pattern = r"(?Ptrue|false)\s*" - single_quotes_value_pattern = r"""'(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')+)'""" + single_quotes_value_pattern = r"""'(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')*)'""" _value_pattern = fr"{num_value_pattern}|{bool_value_pattern}|{single_quotes_value_pattern}" multi_value_pattern = r"""\((?P\d+(?:,\s*\d+)*|'(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')*'(?:,\s*'(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')*')*)\)""" @@ -49,13 +49,13 @@ def should_process_value_wildcard_symbols(operator: str) -> bool: return operator.lower() in ("like",) def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif bool_value := get_match_group(match, group_name='bool_value'): + elif (bool_value := get_match_group(match, group_name='bool_value')) is not None: return operator, bool_value - elif s_q_value := get_match_group(match, group_name='s_q_value'): + elif (s_q_value := get_match_group(match, group_name='s_q_value')) is not None: return operator, s_q_value return super().get_operator_and_value(match, operator) diff --git a/siem-converter/app/converter/backends/chronicle/tokenizer.py b/siem-converter/app/converter/backends/chronicle/tokenizer.py index 51bd4ba6..a9427911 100644 --- 
a/siem-converter/app/converter/backends/chronicle/tokenizer.py +++ b/siem-converter/app/converter/backends/chronicle/tokenizer.py @@ -31,22 +31,22 @@ class ChronicleQueryTokenizer(QueryTokenizer): num_value_pattern = r"(?P\d+(?:\.\d+)*)\s*" bool_value_pattern = r"(?Ptrue|false)\s*" double_quotes_value_pattern = r'"(?P(?:[:a-zA-Z\*0-9=+%#\-_/,\'\.$&^@!\(\)\{\}\s]|\\\"|\\\\)*)"\s*(?:nocase)?' - re_value_pattern = r"/(?P[:a-zA-Z\*0-9=+%#\\\-_\,\"\'\.$&^@!\(\)\{\}\s?]*)/\s*(?:nocase)?" + re_value_pattern = r"/(?P(?:\\\/|[:a-zA-Z\*0-9=+%#\\\-_\,\"\'\.$&^@!\(\)\{\}\s?])+)/\s*(?:nocase)?" _value_pattern = fr"{num_value_pattern}|{bool_value_pattern}|{double_quotes_value_pattern}|{re_value_pattern}" wildcard_symbol = ".*" def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif bool_value := get_match_group(match, group_name='bool_value'): + elif (bool_value := get_match_group(match, group_name='bool_value')) is not None: return operator, bool_value - elif d_q_value := get_match_group(match, group_name='d_q_value'): + elif (d_q_value := get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value - elif re_value := get_match_group(match, group_name='re_value'): + elif (re_value := get_match_group(match, group_name='re_value')) is not None: return OperatorType.REGEX, re_value return super().get_operator_and_value(match, operator) @@ -94,10 +94,10 @@ def search_field_value(self, query): return super().search_field_value(query=query) def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if d_q_value := get_match_group(match, group_name='d_q_value'): + if (d_q_value := get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value - elif b_q_value := 
get_match_group(match, group_name='b_q_value'): + elif (b_q_value := get_match_group(match, group_name='b_q_value')) is not None: return operator, b_q_value return super().get_operator_and_value(match, operator) diff --git a/siem-converter/app/converter/backends/elasticsearch/renders/elasticsearch.py b/siem-converter/app/converter/backends/elasticsearch/renders/elasticsearch.py index 9805ce59..d91aa991 100644 --- a/siem-converter/app/converter/backends/elasticsearch/renders/elasticsearch.py +++ b/siem-converter/app/converter/backends/elasticsearch/renders/elasticsearch.py @@ -82,6 +82,8 @@ class ElasticSearchQueryRender(BaseQueryRender): field_value_map = ElasticSearchFieldValue(or_token=or_token) query_pattern = "{query} {functions}" + comment_symbol = "//" + is_multi_line_comment = True def generate_prefix(self, logsource: dict) -> str: return "" diff --git a/siem-converter/app/converter/backends/elasticsearch/tokenizer.py b/siem-converter/app/converter/backends/elasticsearch/tokenizer.py index 236fd701..069491ab 100644 --- a/siem-converter/app/converter/backends/elasticsearch/tokenizer.py +++ b/siem-converter/app/converter/backends/elasticsearch/tokenizer.py @@ -64,16 +64,16 @@ def clean_quotes(value: Union[str, int]): return value def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif re_value := get_match_group(match, group_name='re_value'): + elif (re_value := get_match_group(match, group_name='re_value')) is not None: return OperatorType.REGEX, re_value - elif n_q_value := get_match_group(match, group_name='n_q_value'): + elif (n_q_value := get_match_group(match, group_name='n_q_value')) is not None: return operator, n_q_value - elif d_q_value := get_match_group(match, group_name='d_q_value'): + elif (d_q_value := 
get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value return super().get_operator_and_value(match) diff --git a/siem-converter/app/converter/backends/logscale/tokenizer.py b/siem-converter/app/converter/backends/logscale/tokenizer.py index 2243141e..3c02dcd6 100644 --- a/siem-converter/app/converter/backends/logscale/tokenizer.py +++ b/siem-converter/app/converter/backends/logscale/tokenizer.py @@ -30,20 +30,20 @@ class LogScaleTokenizer(QueryTokenizer): match_operator_pattern = r"""(?:___field___\s?(?P=|!=))\s?""" num_value_pattern = r"(?P\d+(?:\.\d+)*)\s*" double_quotes_value_pattern = r'"(?P(?:[:a-zA-Z\*0-9=+%#\-_/,\'\.$&^@!\(\)\{\}\s]|\\\"|\\)*)"\s*' - re_value_pattern = r"/(?P[:a-zA-Z\*0-9=+%#\\\-_\,\"\'\.$&^@!\(\)\{\}\s?]*)/i?\s*" + re_value_pattern = r"/(?P[:a-zA-Z\*0-9=+%#\\\-_\,\"\'\.$&^@!\(\)\{\}\s?]+)/i?\s*" _value_pattern = fr"""{num_value_pattern}|{re_value_pattern}|{double_quotes_value_pattern}""" keyword_pattern = double_quotes_value_pattern wildcard_symbol = "*" def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif d_q_value := get_match_group(match, group_name='d_q_value'): + elif (d_q_value := get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value - elif re_value := get_match_group(match, group_name='re_value'): + elif (re_value := get_match_group(match, group_name='re_value')) is not None: return OperatorType.REGEX, re_value return super().get_operator_and_value(match, operator) diff --git a/siem-converter/app/converter/backends/microsoft/renders/microsoft_sentinel.py b/siem-converter/app/converter/backends/microsoft/renders/microsoft_sentinel.py index efb16db4..b42f360f 100644 --- 
a/siem-converter/app/converter/backends/microsoft/renders/microsoft_sentinel.py +++ b/siem-converter/app/converter/backends/microsoft/renders/microsoft_sentinel.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ +from typing import Union from app.converter.backends.microsoft.const import microsoft_sentinel_query_details from app.converter.backends.microsoft.mapping import MicrosoftSentinelMappings, microsoft_sentinel_mappings @@ -28,11 +29,15 @@ class MicrosoftSentinelFieldValue(BaseQueryFieldValue): details: PlatformDetails = microsoft_sentinel_query_details + @staticmethod + def __escape_value(value: Union[int, str]) -> Union[int, str]: + return value.replace("'", "''") if isinstance(value, str) else value + def equal_modifier(self, field, value): if isinstance(value, str): - return f"{field} =~ @'{value}'" + return f"{field} =~ @'{self.__escape_value(value)}'" elif isinstance(value, list): - prepared_values = ", ".join(f"@'{v}'" for v in value) + prepared_values = ", ".join(f"@'{self.__escape_value(v)}'" for v in value) operator = "in~" if all(isinstance(v, str) for v in value) else "in" return f'{field} {operator} ({prepared_values})' return f'{field} == {value}' @@ -40,20 +45,20 @@ def equal_modifier(self, field, value): def contains_modifier(self, field, value): if isinstance(value, list): return f"({self.or_token.join(self.contains_modifier(field=field, value=v) for v in value)})" - return f"{field} contains @'{value}'" + return f"{field} contains @'{self.__escape_value(value)}'" def endswith_modifier(self, field, value): if isinstance(value, list): return f"({self.or_token.join(self.endswith_modifier(field=field, value=v) for v in value)})" - return f"{field} endswith @'{value}'" + return f"{field} endswith @'{self.__escape_value(value)}'" def startswith_modifier(self, field, value): if isinstance(value, list): return f"({self.or_token.join(self.startswith_modifier(field=field, value=v) 
for v in value)})" - return f"{field} startswith @'{value}'" + return f"{field} startswith @'{self.__escape_value(value)}'" def __regex_modifier(self, field, value): - return f"{field} matches regex @'(?i){value}'" + return f"{field} matches regex @'(?i){self.__escape_value(value)}'" def regex_modifier(self, field, value): if isinstance(value, list): @@ -63,7 +68,7 @@ def regex_modifier(self, field, value): def keywords(self, field, value): if isinstance(value, list): return f"({self.or_token.join(self.keywords(field=field, value=v) for v in value)})" - return f"* contains @'{value}'" + return f"* contains @'{self.__escape_value(value)}'" class MicrosoftSentinelQueryRender(BaseQueryRender): @@ -78,14 +83,11 @@ class MicrosoftSentinelQueryRender(BaseQueryRender): mappings: MicrosoftSentinelMappings = microsoft_sentinel_mappings comment_symbol = "//" + is_multi_line_comment = True def generate_prefix(self, log_source_signature: LogSourceSignature) -> str: return str(log_source_signature) - def render_not_supported_functions(self, not_supported_functions: list) -> str: - render_not_suported = "\n".join([f'// {i}' for i in not_supported_functions]) - return "\n\n" + f"// {self.unsupported_functions_text}" + render_not_suported - def generate_functions(self, functions: list) -> str: if not functions: return "" diff --git a/siem-converter/app/converter/backends/microsoft/tokenizer.py b/siem-converter/app/converter/backends/microsoft/tokenizer.py index 861b5b92..4eafb3f6 100644 --- a/siem-converter/app/converter/backends/microsoft/tokenizer.py +++ b/siem-converter/app/converter/backends/microsoft/tokenizer.py @@ -34,7 +34,7 @@ class MicrosoftSentinelTokenizer(QueryTokenizer, OperatorBasedMixin): single_quotes_value_pattern = r"@?'(?P(?:[:a-zA-Z\*0-9=+%#\-_/,\"\.$&^@!\(\)\{\}\s]|\\\'|\\\\)*)'\s*" str_value_pattern = fr"""{double_quotes_value_pattern}|{single_quotes_value_pattern}""" _value_pattern = fr"""{bool_value_pattern}|{num_value_pattern}|{str_value_pattern}""" - 
multi_value_pattern = r"""\((?P[:a-zA-Z\"\*0-9=+%#\-_\/\\'\,.&^@!\(\s]*)\)""" + multi_value_pattern = r"""\((?P[:a-zA-Z\"\*0-9=+%#\-_\/\\'\,.&^@!\(\s]+)\)""" keyword_pattern = fr"\*\s+contains\s+(?:{str_value_pattern})" multi_value_operators = ("in", "in~") @@ -50,16 +50,16 @@ def __init__(self, *args, **kwargs): self.operators_map.update(super().operators_map) def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif bool_value := get_match_group(match, group_name='bool_value'): + elif (bool_value := get_match_group(match, group_name='bool_value')) is not None: return operator, bool_value - elif d_q_value := get_match_group(match, group_name='d_q_value'): + elif (d_q_value := get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value - elif s_q_value := get_match_group(match, group_name='s_q_value'): + elif (s_q_value := get_match_group(match, group_name='s_q_value')) is not None: return operator, s_q_value return super().get_operator_and_value(match, operator) diff --git a/siem-converter/app/converter/backends/opensearch/renders/opensearch.py b/siem-converter/app/converter/backends/opensearch/renders/opensearch.py index cdaaa255..c0b30ca9 100644 --- a/siem-converter/app/converter/backends/opensearch/renders/opensearch.py +++ b/siem-converter/app/converter/backends/opensearch/renders/opensearch.py @@ -71,6 +71,8 @@ class OpenSearchQueryRender(BaseQueryRender): field_value_map = OpenSearchFieldValue(or_token=or_token) query_pattern = "{query} {functions}" + comment_symbol = "//" + is_multi_line_comment = True def generate_prefix(self, logsource: dict) -> str: return "" diff --git a/siem-converter/app/converter/backends/opensearch/tokenizer.py b/siem-converter/app/converter/backends/opensearch/tokenizer.py index 
6ba28c67..a9f0d795 100644 --- a/siem-converter/app/converter/backends/opensearch/tokenizer.py +++ b/siem-converter/app/converter/backends/opensearch/tokenizer.py @@ -64,16 +64,16 @@ def clean_quotes(value: Union[str, int]): return value def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif re_value := get_match_group(match, group_name='re_value'): + elif (re_value := get_match_group(match, group_name='re_value')) is not None: return OperatorType.REGEX, re_value - elif n_q_value := get_match_group(match, group_name='n_q_value'): + elif (n_q_value := get_match_group(match, group_name='n_q_value')) is not None: return operator, n_q_value - elif d_q_value := get_match_group(match, group_name='d_q_value'): + elif (d_q_value := get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value return super().get_operator_and_value(match) diff --git a/siem-converter/app/converter/backends/qradar/const.py b/siem-converter/app/converter/backends/qradar/const.py index 5e48657b..079a84fa 100644 --- a/siem-converter/app/converter/backends/qradar/const.py +++ b/siem-converter/app/converter/backends/qradar/const.py @@ -11,7 +11,7 @@ } NUM_VALUE_PATTERN = r"(?P\d+(?:\.\d+)*)" -SINGLE_QUOTES_VALUE_PATTERN = r"""'(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')+)'""" +SINGLE_QUOTES_VALUE_PATTERN = r"""'(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')*)'""" qradar_query_details = PlatformDetails(**QRADAR_QUERY_DETAILS) diff --git a/siem-converter/app/converter/backends/qradar/renders/qradar.py b/siem-converter/app/converter/backends/qradar/renders/qradar.py index 360b1f91..116cee47 100644 --- a/siem-converter/app/converter/backends/qradar/renders/qradar.py +++ b/siem-converter/app/converter/backends/qradar/renders/qradar.py @@ 
-82,4 +82,3 @@ def generate_prefix(self, log_source_signature: QradarLogSourceSignature) -> str def generate_functions(self, functions: list): return "" - diff --git a/siem-converter/app/converter/backends/qradar/tokenizer.py b/siem-converter/app/converter/backends/qradar/tokenizer.py index 11df56b7..f8494ea9 100644 --- a/siem-converter/app/converter/backends/qradar/tokenizer.py +++ b/siem-converter/app/converter/backends/qradar/tokenizer.py @@ -52,13 +52,13 @@ def should_process_value_wildcard_symbols(operator: str) -> bool: return operator.lower() in ("like", "ilike") def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif bool_value := get_match_group(match, group_name='bool_value'): + elif (bool_value := get_match_group(match, group_name='bool_value')) is not None: return operator, bool_value - elif s_q_value := get_match_group(match, group_name='s_q_value'): + elif (s_q_value := get_match_group(match, group_name='s_q_value')) is not None: return operator, s_q_value return super().get_operator_and_value(match, operator) diff --git a/siem-converter/app/converter/backends/splunk/renders/splunk.py b/siem-converter/app/converter/backends/splunk/renders/splunk.py index 86db0029..ae3414f5 100644 --- a/siem-converter/app/converter/backends/splunk/renders/splunk.py +++ b/siem-converter/app/converter/backends/splunk/renders/splunk.py @@ -73,7 +73,3 @@ def generate_functions(self, functions: list): def wrap_with_comment(self, value: str) -> str: return f"{self.comment_symbol} {value} {self.comment_symbol}" - - def render_not_supported_functions(self, not_supported_functions): - render_not_suported = "\n".join(not_supported_functions) - return f'\n\n""" {self.unsupported_functions_text}' + render_not_suported + '"""' diff --git 
a/siem-converter/app/converter/backends/splunk/renders/splunk_alert.py b/siem-converter/app/converter/backends/splunk/renders/splunk_alert.py index 9b01ebc9..9ac8f679 100644 --- a/siem-converter/app/converter/backends/splunk/renders/splunk_alert.py +++ b/siem-converter/app/converter/backends/splunk/renders/splunk_alert.py @@ -34,7 +34,7 @@ class SplunkAlertFieldValue(SplunkFieldValue): class SplunkAlertRender(SplunkQueryRender): details: PlatformDetails = splunk_alert_details or_token = "OR" - field_value_map = SplunkFieldValue(or_token=or_token) + field_value_map = SplunkAlertFieldValue(or_token=or_token) def finalize_query(self, prefix: str, query: str, functions: str, meta_info: MetaInfoContainer, source_mapping: SourceMapping = None, not_supported_functions: list = None): diff --git a/siem-converter/app/converter/backends/splunk/tokenizer.py b/siem-converter/app/converter/backends/splunk/tokenizer.py index cfbe66ee..82b07649 100644 --- a/siem-converter/app/converter/backends/splunk/tokenizer.py +++ b/siem-converter/app/converter/backends/splunk/tokenizer.py @@ -25,29 +25,29 @@ class SplunkTokenizer(QueryTokenizer): - field_pattern = r"(?P[a-zA-Z\.\-]+)" + field_pattern = r"(?P[a-zA-Z\.\-_\{\}]+)" num_value_pattern = r"(?P\d+(?:\.\d+)*)\s*" - double_quotes_value_pattern = r'"(?P(?:[:a-zA-Z\*0-9=+%#\-_/,\'\.$&^@!\(\)\{\}\s]|\\\"|\\)*)"\s*' - single_quotes_value_pattern = r"'(?P(?:[:a-zA-Z\*0-9=+%#\-_/,\"\.$&^@!\(\)\{\}\s]|\\\'|\\)*)'\s*" + double_quotes_value_pattern = r'"(?P(?:[:a-zA-Z\*0-9=+%#\-_/,;\'\.$&^@!\(\)\{\}\s]|\\\"|\\)*)"\s*' + single_quotes_value_pattern = r"'(?P(?:[:a-zA-Z\*0-9=+%#\-_/,;\"\.$&^@!\(\)\{\}\s]|\\\'|\\)*)'\s*" no_quotes_value = r"(?P(?:[:a-zA-Z\*0-9=+%#\-_/,\.\\$&^@!])+)\s*" _value_pattern = fr"{num_value_pattern}|{no_quotes_value}|{double_quotes_value_pattern}|{single_quotes_value_pattern}" - multi_value_pattern = r"""\((?P[:a-zA-Z\"\*0-9=+%#\-_\/\\'\,.&^@!\(\s]+)\)""" + multi_value_pattern = 
r"""\((?P[:a-zA-Z\"\*0-9=+%#\-_\/\\'\,;.$&^@!\{\}\(\s]+)\)""" keyword_pattern = double_quotes_value_pattern multi_value_operators = ("in",) wildcard_symbol = "*" def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.EQ) -> Tuple[str, Any]: - if num_value := get_match_group(match, group_name='num_value'): + if (num_value := get_match_group(match, group_name='num_value')) is not None: return operator, num_value - elif no_q_value := get_match_group(match, group_name='no_q_value'): + elif (no_q_value := get_match_group(match, group_name='no_q_value')) is not None: return operator, no_q_value - elif d_q_value := get_match_group(match, group_name='d_q_value'): + elif (d_q_value := get_match_group(match, group_name='d_q_value')) is not None: return operator, d_q_value - elif s_q_value := get_match_group(match, group_name='s_q_value'): + elif (s_q_value := get_match_group(match, group_name='s_q_value')) is not None: return operator, s_q_value return super().get_operator_and_value(match) diff --git a/siem-converter/app/converter/core/models/field.py b/siem-converter/app/converter/core/models/field.py index 2e422a16..e881cf3b 100644 --- a/siem-converter/app/converter/core/models/field.py +++ b/siem-converter/app/converter/core/models/field.py @@ -24,7 +24,7 @@ def __add_value(self, value: Union[int, str, list, tuple]): self.values.extend(value) elif value and isinstance(value, str) and value.isnumeric(): self.values.append(int(value)) - elif value and isinstance(value, (int, str)): + elif value is not None and isinstance(value, (int, str)): self.values.append(value) def __add__(self, other): diff --git a/siem-converter/app/converter/core/render.py b/siem-converter/app/converter/core/render.py index 1b4e92f6..e5e2dae9 100644 --- a/siem-converter/app/converter/core/render.py +++ b/siem-converter/app/converter/core/render.py @@ -84,7 +84,8 @@ class BaseQueryRender: query_pattern = '{table} {query} {functions}' comment_symbol: str = None - 
unsupported_functions_text = 'Unsupported functions were excluded from the result query:\n' + is_multi_line_comment: bool = False + unsupported_functions_text = 'Unsupported functions were excluded from the result query:' def __init__(self): self.operator_map = { @@ -153,11 +154,12 @@ def finalize_query(self, return query def render_not_supported_functions(self, not_supported_functions: list) -> str: - render_not_supported = "\n".join(f"//{i}" for i in not_supported_functions) - return "\n\n" + f"// {self.unsupported_functions_text}" + render_not_supported + line_template = f"{self.comment_symbol} " if self.comment_symbol and self.is_multi_line_comment else "" + not_supported_functions_str = "\n".join(line_template + func for func in not_supported_functions) + return "\n\n" + self.wrap_with_comment(f"{self.unsupported_functions_text}\n{not_supported_functions_str}") def wrap_with_comment(self, value: str) -> str: - return f"{self.comment_symbol}{value}" + return f"{self.comment_symbol} {value}" def finalize(self, queries_map: Dict[str, str]) -> str: unique_queries = set(queries_map.values()) diff --git a/siem-converter/app/converter/tools/decorators.py b/siem-converter/app/converter/tools/decorators.py index 180cf170..d917563d 100644 --- a/siem-converter/app/converter/tools/decorators.py +++ b/siem-converter/app/converter/tools/decorators.py @@ -9,12 +9,15 @@ def exception_handler(*args, **kwargs): try: result = func(*args, **kwargs) if result: + print("Translated successfully.") return True, result else: + print("Unexpected error.") return False, f"Unexpected error. To resolve it, please, contact us via GitHub." except (BaseParserException, BasePlatformException, BaseRenderException, BaseIOCsException) as err: - print(str(err)) + print(f"Unexpected error. {str(err)}") return False, str(err) except Exception as err: + print(f"Unexpected error. {str(err)}") return False, f"Unexpected error. To resolve it, please, contact us via GitHub." 
    return exception_handler