From 94037ce15365290db0e06320cf4f8523d189034f Mon Sep 17 00:00:00 2001 From: Nazar Gesyk Date: Wed, 22 May 2024 12:38:34 +0300 Subject: [PATCH 1/2] Created base aql platform and fixes --- .../app/translator/core/exceptions/core.py | 4 + .../app/translator/core/mixins/rule.py | 14 +- .../translator/core/models/query_container.py | 7 + uncoder-core/app/translator/core/parser.py | 7 +- .../app/translator/core/render_cti.py | 2 + uncoder-core/app/translator/core/tokenizer.py | 8 +- .../translator/platforms/arcsight/__init__.py | 2 +- .../translator/platforms/arcsight/const.py | 8 ++ .../platforms/arcsight/mappings/__init__.py | 0 .../arcsight/mappings/arcsight_cti.py | 12 ++ .../platforms/arcsight/renders/__init__.py | 0 .../translator/platforms/athena/__init__.py | 6 +- .../translator/platforms/base/aql/__init__.py | 0 .../translator/platforms/base/aql/const.py | 3 + .../platforms/base/aql/escape_manager.py | 8 ++ .../platforms/base/aql/parsers/__init__.py | 0 .../platforms/base/aql/renders/__init__.py | 0 .../platforms/base/aql/renders/aql.py | 122 ++++++++++++++++++ .../{qradar => base/aql}/tokenizer.py | 9 +- .../platforms/carbonblack/__init__.py | 2 +- .../platforms/chronicle/__init__.py | 10 +- .../platforms/crowdstrike/__init__.py | 6 +- .../platforms/elasticsearch/__init__.py | 16 +-- .../platforms/fireeye_helix/__init__.py | 2 +- .../platforms/forti_siem/__init__.py | 2 +- .../translator/platforms/graylog/__init__.py | 6 +- .../translator/platforms/logpoint/__init__.py | 2 +- .../platforms/logrhythm_axon/__init__.py | 4 +- .../translator/platforms/logscale/__init__.py | 10 +- .../platforms/microsoft/__init__.py | 16 +-- .../platforms/opensearch/__init__.py | 8 +- .../platforms/palo_alto/__init__.py | 2 +- .../translator/platforms/qradar/__init__.py | 6 +- .../app/translator/platforms/qradar/const.py | 6 - .../platforms/qradar/escape_manager.py | 8 -- .../translator/platforms/qradar/mapping.py | 88 ------------- .../platforms/qradar/parsers/qradar.py | 97 +------------- .../platforms/qradar/renders/qradar.py | 103 +-------------- .../translator/platforms/qualys/__init__.py | 2 +- .../translator/platforms/roota/__init__.py | 2 +- .../platforms/rsa_netwitness/__init__.py | 2 +- .../platforms/securonix/__init__.py | 2 +- .../platforms/sentinel_one/__init__.py | 2 +- .../translator/platforms/sigma/__init__.py | 4 +- .../platforms/snowflake/__init__.py | 2 +- .../translator/platforms/splunk/__init__.py | 10 +- .../platforms/sumo_logic/__init__.py | 2 +- uncoder-core/requirements.txt | 1 + 48 files changed, 265 insertions(+), 370 deletions(-) create mode 100644 uncoder-core/app/translator/platforms/arcsight/const.py create mode 100644 uncoder-core/app/translator/platforms/arcsight/mappings/__init__.py create mode 100644 uncoder-core/app/translator/platforms/arcsight/mappings/arcsight_cti.py create mode 100644 uncoder-core/app/translator/platforms/arcsight/renders/__init__.py create mode 100644 uncoder-core/app/translator/platforms/base/aql/__init__.py create mode 100644 uncoder-core/app/translator/platforms/base/aql/const.py create mode 100644 uncoder-core/app/translator/platforms/base/aql/escape_manager.py create mode 100644 uncoder-core/app/translator/platforms/base/aql/parsers/__init__.py create mode 100644 uncoder-core/app/translator/platforms/base/aql/renders/__init__.py create mode 100644 uncoder-core/app/translator/platforms/base/aql/renders/aql.py rename uncoder-core/app/translator/platforms/{qradar => base/aql}/tokenizer.py (92%) delete mode 100644 
uncoder-core/app/translator/platforms/qradar/escape_manager.py delete mode 100644 uncoder-core/app/translator/platforms/qradar/mapping.py diff --git a/uncoder-core/app/translator/core/exceptions/core.py b/uncoder-core/app/translator/core/exceptions/core.py index a0d27273..68c66962 100644 --- a/uncoder-core/app/translator/core/exceptions/core.py +++ b/uncoder-core/app/translator/core/exceptions/core.py @@ -77,3 +77,7 @@ class InvalidYamlStructure(InvalidRuleStructure): class InvalidJSONStructure(InvalidRuleStructure): rule_type: str = "JSON" + + +class InvalidXMLStructure(InvalidRuleStructure): + rule_type: str = "XML" diff --git a/uncoder-core/app/translator/core/mixins/rule.py b/uncoder-core/app/translator/core/mixins/rule.py index 90cd974a..21e3451e 100644 --- a/uncoder-core/app/translator/core/mixins/rule.py +++ b/uncoder-core/app/translator/core/mixins/rule.py @@ -1,8 +1,10 @@ import json +from typing import Union +import xmltodict import yaml -from app.translator.core.exceptions.core import InvalidJSONStructure, InvalidYamlStructure +from app.translator.core.exceptions.core import InvalidJSONStructure, InvalidXMLStructure, InvalidYamlStructure from app.translator.core.mitre import MitreConfig @@ -36,5 +38,13 @@ def parse_mitre_attack(self, tags: list[str]) -> dict[str, list]: result["techniques"].append(technique) elif tactic := self.mitre_config.get_tactic(tag): result["tactics"].append(tactic) - return result + + +class XMLRuleMixin: + @staticmethod + def load_rule(text: Union[str, bytes]) -> dict: + try: + return xmltodict.parse(text) + except Exception as err: + raise InvalidXMLStructure(error=str(err)) from err diff --git a/uncoder-core/app/translator/core/models/query_container.py b/uncoder-core/app/translator/core/models/query_container.py index 1fd335ee..dccfc180 100644 --- a/uncoder-core/app/translator/core/models/query_container.py +++ b/uncoder-core/app/translator/core/models/query_container.py @@ -56,6 +56,13 @@ class RawQueryContainer: meta_info: MetaInfoContainer = field(default_factory=MetaInfoContainer) +@dataclass +class RawQueryDictContainer: + query: dict + language: str + meta_info: dict + + @dataclass class TokenizedQueryContainer: tokens: list[TOKEN_TYPE] diff --git a/uncoder-core/app/translator/core/parser.py b/uncoder-core/app/translator/core/parser.py index 4702313f..791734be 100644 --- a/uncoder-core/app/translator/core/parser.py +++ b/uncoder-core/app/translator/core/parser.py @@ -32,9 +32,13 @@ class QueryParser(ABC): wrapped_with_comment_pattern: str = None + details: PlatformDetails = None def remove_comments(self, text: str) -> str: - return re.sub(self.wrapped_with_comment_pattern, "\n", text, flags=re.MULTILINE).strip() + if self.wrapped_with_comment_pattern: + return re.sub(self.wrapped_with_comment_pattern, "\n", text, flags=re.MULTILINE).strip() + + return text def parse_raw_query(self, text: str, language: str) -> RawQueryContainer: return RawQueryContainer(query=text, language=language) @@ -47,7 +51,6 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain class PlatformQueryParser(QueryParser, ABC): mappings: BasePlatformMappings = None tokenizer: QueryTokenizer = None - details: PlatformDetails = None platform_functions: PlatformFunctions = None def get_fields_tokens(self, tokens: list[Union[FieldValue, Keyword, Identifier]]) -> list[Field]: diff --git a/uncoder-core/app/translator/core/render_cti.py b/uncoder-core/app/translator/core/render_cti.py index baec70a0..52a65ea6 100644 --- 
a/uncoder-core/app/translator/core/render_cti.py +++ b/uncoder-core/app/translator/core/render_cti.py @@ -19,6 +19,7 @@ from app.translator.core.models.iocs import IocsChunkValue +from app.translator.core.models.platform_details import PlatformDetails class RenderCTI: @@ -31,6 +32,7 @@ class RenderCTI: final_result_for_many: str = "union * | where ({result})\n" final_result_for_one: str = "union * | where {result}\n" default_mapping = None + details: PlatformDetails = None def create_field_value(self, field: str, value: str, generic_field: str) -> str: # noqa: ARG002 return self.field_value_template.format(key=field, value=value) diff --git a/uncoder-core/app/translator/core/tokenizer.py b/uncoder-core/app/translator/core/tokenizer.py index b2296221..264cd98a 100644 --- a/uncoder-core/app/translator/core/tokenizer.py +++ b/uncoder-core/app/translator/core/tokenizer.py @@ -52,6 +52,8 @@ class QueryTokenizer(BaseTokenizer): single_value_operators_map: ClassVar[dict[str, str]] = {} # used to generate re pattern. so the keys order is important multi_value_operators_map: ClassVar[dict[str, str]] = {} + # used to generate re pattern. so the keys order is important + fields_operator_map: ClassVar[dict[str, str]] = {} operators_map: ClassVar[dict[str, str]] = {} # used to generate re pattern. so the keys order is important logical_operator_pattern = r"^(?Pand|or|not|AND|OR|NOT)\s+" @@ -73,7 +75,11 @@ class QueryTokenizer(BaseTokenizer): def __init_subclass__(cls, **kwargs): cls._validate_re_patterns() cls.value_pattern = cls.base_value_pattern.replace("___value_pattern___", cls._value_pattern) - cls.operators_map = {**cls.single_value_operators_map, **cls.multi_value_operators_map} + cls.operators_map = { + **cls.single_value_operators_map, + **cls.multi_value_operators_map, + **cls.fields_operator_map, + } cls.operator_pattern = rf"""(?:___field___\s*(?P(?:{'|'.join(cls.operators_map)})))\s*""" @classmethod diff --git a/uncoder-core/app/translator/platforms/arcsight/__init__.py b/uncoder-core/app/translator/platforms/arcsight/__init__.py index 661257f4..cefce570 100644 --- a/uncoder-core/app/translator/platforms/arcsight/__init__.py +++ b/uncoder-core/app/translator/platforms/arcsight/__init__.py @@ -1 +1 @@ -from app.translator.platforms.arcsight.renders.arcsight_cti import ArcsightKeyword +from app.translator.platforms.arcsight.renders.arcsight_cti import ArcsightKeyword # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/arcsight/const.py b/uncoder-core/app/translator/platforms/arcsight/const.py new file mode 100644 index 00000000..0bd27667 --- /dev/null +++ b/uncoder-core/app/translator/platforms/arcsight/const.py @@ -0,0 +1,8 @@ +ARCSIGHT_QUERY_DETAILS = { + "platform_id": "arcsight", + "name": "ArcSight Query", + "group_name": "ArcSight", + "group_id": "arcsight", + "platform_name": "Query", + "alt_platform_name": "CEF", +} diff --git a/uncoder-core/app/translator/platforms/arcsight/mappings/__init__.py b/uncoder-core/app/translator/platforms/arcsight/mappings/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/uncoder-core/app/translator/platforms/arcsight/mappings/arcsight_cti.py b/uncoder-core/app/translator/platforms/arcsight/mappings/arcsight_cti.py new file mode 100644 index 00000000..4a01074d --- /dev/null +++ b/uncoder-core/app/translator/platforms/arcsight/mappings/arcsight_cti.py @@ -0,0 +1,12 @@ +DEFAULT_ARCSIGHT_MAPPING = { + "SourceIP": "sourceAddress", + "DestinationIP": "destinationAddress", + "Domain": "destinationDnsDomain", + "URL": 
"requestUrl", + "HashMd5": "fileHash", + "HashSha1": "fileHash", + "HashSha256": "fileHash", + "HashSha512": "fileHash", + "Emails": "sender-address", + "Files": "winlog.event_data.TargetFilename", +} diff --git a/uncoder-core/app/translator/platforms/arcsight/renders/__init__.py b/uncoder-core/app/translator/platforms/arcsight/renders/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/uncoder-core/app/translator/platforms/athena/__init__.py b/uncoder-core/app/translator/platforms/athena/__init__.py index e82614ac..adb534b1 100644 --- a/uncoder-core/app/translator/platforms/athena/__init__.py +++ b/uncoder-core/app/translator/platforms/athena/__init__.py @@ -1,3 +1,3 @@ -from app.translator.platforms.athena.parsers.athena import AthenaQueryParser -from app.translator.platforms.athena.renders.athena import AthenaQueryRender -from app.translator.platforms.athena.renders.athena_cti import AthenaCTI +from app.translator.platforms.athena.parsers.athena import AthenaQueryParser # noqa: F401 +from app.translator.platforms.athena.renders.athena import AthenaQueryRender # noqa: F401 +from app.translator.platforms.athena.renders.athena_cti import AthenaCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/base/aql/__init__.py b/uncoder-core/app/translator/platforms/base/aql/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/uncoder-core/app/translator/platforms/base/aql/const.py b/uncoder-core/app/translator/platforms/base/aql/const.py new file mode 100644 index 00000000..267ead7d --- /dev/null +++ b/uncoder-core/app/translator/platforms/base/aql/const.py @@ -0,0 +1,3 @@ +UTF8_PAYLOAD_PATTERN = r"UTF8\(payload\)" +NUM_VALUE_PATTERN = r"(?P\d+(?:\.\d+)*)" +SINGLE_QUOTES_VALUE_PATTERN = r"""'(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')*)'""" diff --git a/uncoder-core/app/translator/platforms/base/aql/escape_manager.py b/uncoder-core/app/translator/platforms/base/aql/escape_manager.py new file mode 100644 index 00000000..fd172ba2 --- /dev/null +++ b/uncoder-core/app/translator/platforms/base/aql/escape_manager.py @@ -0,0 +1,8 @@ +from app.translator.core.escape_manager import EscapeManager + + +class AQLEscapeManager(EscapeManager): + ... + + +aql_escape_manager = AQLEscapeManager() diff --git a/uncoder-core/app/translator/platforms/base/aql/parsers/__init__.py b/uncoder-core/app/translator/platforms/base/aql/parsers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/uncoder-core/app/translator/platforms/base/aql/renders/__init__.py b/uncoder-core/app/translator/platforms/base/aql/renders/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/uncoder-core/app/translator/platforms/base/aql/renders/aql.py b/uncoder-core/app/translator/platforms/base/aql/renders/aql.py new file mode 100644 index 00000000..ba4caa10 --- /dev/null +++ b/uncoder-core/app/translator/platforms/base/aql/renders/aql.py @@ -0,0 +1,122 @@ +""" +Uncoder IO Community Edition License +----------------------------------------------------------------- +Copyright (c) 2024 SOC Prime, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +----------------------------------------------------------------- +""" +from typing import Union + +from app.translator.const import DEFAULT_VALUE_TYPE +from app.translator.core.custom_types.values import ValueType +from app.translator.core.render import BaseQueryFieldValue, PlatformQueryRender +from app.translator.platforms.base.aql.escape_manager import aql_escape_manager +from app.translator.platforms.base.aql.mapping import AQLLogSourceSignature, AQLMappings, aql_mappings + + +class AQLFieldValue(BaseQueryFieldValue): + escape_manager = aql_escape_manager + + def apply_value(self, value: Union[str, int], value_type: str = ValueType.value) -> Union[str, int]: # noqa: ARG002 + if isinstance(value, str): + value = value.replace("_", "__").replace("%", "%%").replace("\\'", "%").replace("'", '"') + if value.endswith("\\\\%"): + value = value.replace("\\\\%", "\\%") + return value + + def _apply_value(self, value: Union[str, int]) -> Union[str, int]: + if isinstance(value, str) and "\\" in value: + return value + return self.apply_value(value) + + def equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join([self.equal_modifier(field=field, value=v) for v in value])})" + if field == "UTF8(payload)": + return f"UTF8(payload) ILIKE '{self.apply_value(value)}'" + if isinstance(value, int): + return f'"{field}"={value}' + + return f"\"{field}\"='{self._apply_value(value)}'" + + def less_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f'"{field}"<{value}' + return f"\"{field}\"<'{self._apply_value(value)}'" + + def less_or_equal_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f'"{field}"<={value}' + return f"\"{field}\"<='{self._apply_value(value)}'" + + def greater_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f'"{field}">{value}' + return f"\"{field}\">'{self._apply_value(value)}'" + + def greater_or_equal_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f'"{field}">={value}' + return f"\"{field}\">='{self._apply_value(value)}'" + + def not_equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join([self.not_equal_modifier(field=field, value=v) for v in value])})" + if isinstance(value, int): + return f'"{field}"!={value}' + return f"\"{field}\"!='{self._apply_value(value)}'" + + def contains_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.contains_modifier(field=field, value=v) for v in value)})" + return f"\"{field}\" ILIKE '%{self._apply_value(value)}%'" + + def endswith_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.endswith_modifier(field=field, value=v) for v in value)})" + return f"\"{field}\" ILIKE '%{self._apply_value(value)}'" + + def startswith_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.startswith_modifier(field=field, value=v) for v in value)})" + return f"\"{field}\" ILIKE '{self._apply_value(value)}%'" + + def regex_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, 
list): + return f"({self.or_token.join(self.regex_modifier(field=field, value=v) for v in value)})" + return f"\"{field}\" IMATCHES '{value}'" + + def keywords(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.keywords(field=field, value=v) for v in value)})" + return f"UTF8(payload) ILIKE '%{self.apply_value(value)}%'" + + +class AQLQueryRender(PlatformQueryRender): + mappings: AQLMappings = aql_mappings + + or_token = "OR" + and_token = "AND" + not_token = "NOT" + + field_value_map = AQLFieldValue(or_token=or_token) + query_pattern = "{prefix} AND {query} {functions}" + + def generate_prefix(self, log_source_signature: AQLLogSourceSignature) -> str: + table = str(log_source_signature) + extra_condition = log_source_signature.extra_condition + return f"SELECT UTF8(payload) FROM {table} WHERE {extra_condition}" + + def wrap_with_comment(self, value: str) -> str: + return f"/* {value} */" diff --git a/uncoder-core/app/translator/platforms/qradar/tokenizer.py b/uncoder-core/app/translator/platforms/base/aql/tokenizer.py similarity index 92% rename from uncoder-core/app/translator/platforms/qradar/tokenizer.py rename to uncoder-core/app/translator/platforms/base/aql/tokenizer.py index bdd2eecb..39e46b5d 100644 --- a/uncoder-core/app/translator/platforms/qradar/tokenizer.py +++ b/uncoder-core/app/translator/platforms/base/aql/tokenizer.py @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ----------------------------------------------------------------- """ - import re from typing import Any, ClassVar, Union @@ -24,12 +23,12 @@ from app.translator.core.models.field import FieldValue, Keyword from app.translator.core.models.identifier import Identifier from app.translator.core.tokenizer import QueryTokenizer -from app.translator.platforms.qradar.const import NUM_VALUE_PATTERN, SINGLE_QUOTES_VALUE_PATTERN, UTF8_PAYLOAD_PATTERN -from app.translator.platforms.qradar.escape_manager import qradar_escape_manager +from app.translator.platforms.base.aql.const import NUM_VALUE_PATTERN, SINGLE_QUOTES_VALUE_PATTERN, UTF8_PAYLOAD_PATTERN +from app.translator.platforms.base.aql.escape_manager import aql_escape_manager from app.translator.tools.utils import get_match_group -class QradarTokenizer(QueryTokenizer): +class AQLTokenizer(QueryTokenizer): single_value_operators_map: ClassVar[dict[str, str]] = { "=": OperatorType.EQ, "<=": OperatorType.LTE, @@ -49,7 +48,7 @@ class QradarTokenizer(QueryTokenizer): _value_pattern = rf"{NUM_VALUE_PATTERN}|{bool_value_pattern}|{SINGLE_QUOTES_VALUE_PATTERN}" multi_value_pattern = rf"""\((?P<{ValueType.multi_value}>[:a-zA-Z\"\*0-9=+%#\-_\/\\'\,.&^@!\(\s]*)\)""" keyword_pattern = rf"{UTF8_PAYLOAD_PATTERN}\s+(?:like|LIKE|ilike|ILIKE)\s+{SINGLE_QUOTES_VALUE_PATTERN}" - escape_manager = qradar_escape_manager + escape_manager = aql_escape_manager wildcard_symbol = "%" diff --git a/uncoder-core/app/translator/platforms/carbonblack/__init__.py b/uncoder-core/app/translator/platforms/carbonblack/__init__.py index 72cd0014..715f3a24 100644 --- a/uncoder-core/app/translator/platforms/carbonblack/__init__.py +++ b/uncoder-core/app/translator/platforms/carbonblack/__init__.py @@ -1 +1 @@ -from app.translator.platforms.carbonblack.renders.carbonblack_cti import CarbonBlackCTI +from app.translator.platforms.carbonblack.renders.carbonblack_cti import CarbonBlackCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/chronicle/__init__.py 
b/uncoder-core/app/translator/platforms/chronicle/__init__.py index 0fbcb0e2..700cd191 100644 --- a/uncoder-core/app/translator/platforms/chronicle/__init__.py +++ b/uncoder-core/app/translator/platforms/chronicle/__init__.py @@ -1,5 +1,5 @@ -from app.translator.platforms.chronicle.parsers.chronicle import ChronicleQueryParser -from app.translator.platforms.chronicle.parsers.chronicle_rule import ChronicleRuleParser -from app.translator.platforms.chronicle.renders.chronicle import ChronicleQueryRender -from app.translator.platforms.chronicle.renders.chronicle_cti import ChronicleQueryCTI -from app.translator.platforms.chronicle.renders.chronicle_rule import ChronicleSecurityRuleRender +from app.translator.platforms.chronicle.parsers.chronicle import ChronicleQueryParser # noqa: F401 +from app.translator.platforms.chronicle.parsers.chronicle_rule import ChronicleRuleParser # noqa: F401 +from app.translator.platforms.chronicle.renders.chronicle import ChronicleQueryRender # noqa: F401 +from app.translator.platforms.chronicle.renders.chronicle_cti import ChronicleQueryCTI # noqa: F401 +from app.translator.platforms.chronicle.renders.chronicle_rule import ChronicleSecurityRuleRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/crowdstrike/__init__.py b/uncoder-core/app/translator/platforms/crowdstrike/__init__.py index e641e4b0..c7e9dfb5 100644 --- a/uncoder-core/app/translator/platforms/crowdstrike/__init__.py +++ b/uncoder-core/app/translator/platforms/crowdstrike/__init__.py @@ -1,3 +1,3 @@ -from app.translator.platforms.crowdstrike.parsers.crowdstrike import CrowdStrikeQueryParser -from app.translator.platforms.crowdstrike.renders.crowdstrike import CrowdStrikeQueryRender -from app.translator.platforms.crowdstrike.renders.crowdstrike_cti import CrowdStrikeCTI +from app.translator.platforms.crowdstrike.parsers.crowdstrike import CrowdStrikeQueryParser # noqa: F401 +from app.translator.platforms.crowdstrike.renders.crowdstrike import CrowdStrikeQueryRender # noqa: F401 +from app.translator.platforms.crowdstrike.renders.crowdstrike_cti import CrowdStrikeCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/elasticsearch/__init__.py b/uncoder-core/app/translator/platforms/elasticsearch/__init__.py index 4dc1ac91..96017e2e 100644 --- a/uncoder-core/app/translator/platforms/elasticsearch/__init__.py +++ b/uncoder-core/app/translator/platforms/elasticsearch/__init__.py @@ -1,8 +1,8 @@ -from app.translator.platforms.elasticsearch.parsers.detection_rule import ElasticSearchRuleParser -from app.translator.platforms.elasticsearch.parsers.elasticsearch import ElasticSearchQueryParser -from app.translator.platforms.elasticsearch.renders.detection_rule import ElasticSearchRuleRender -from app.translator.platforms.elasticsearch.renders.elast_alert import ElastAlertRuleRender -from app.translator.platforms.elasticsearch.renders.elasticsearch import ElasticSearchQueryRender -from app.translator.platforms.elasticsearch.renders.elasticsearch_cti import ElasticsearchCTI -from app.translator.platforms.elasticsearch.renders.kibana import KibanaRuleRender -from app.translator.platforms.elasticsearch.renders.xpack_watcher import XPackWatcherRuleRender +from app.translator.platforms.elasticsearch.parsers.detection_rule import ElasticSearchRuleParser # noqa: F401 +from app.translator.platforms.elasticsearch.parsers.elasticsearch import ElasticSearchQueryParser # noqa: F401 +from app.translator.platforms.elasticsearch.renders.detection_rule import ElasticSearchRuleRender # noqa: F401 
+from app.translator.platforms.elasticsearch.renders.elast_alert import ElastAlertRuleRender # noqa: F401 +from app.translator.platforms.elasticsearch.renders.elasticsearch import ElasticSearchQueryRender # noqa: F401 +from app.translator.platforms.elasticsearch.renders.elasticsearch_cti import ElasticsearchCTI # noqa: F401 +from app.translator.platforms.elasticsearch.renders.kibana import KibanaRuleRender # noqa: F401 +from app.translator.platforms.elasticsearch.renders.xpack_watcher import XPackWatcherRuleRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/fireeye_helix/__init__.py b/uncoder-core/app/translator/platforms/fireeye_helix/__init__.py index d90f3965..3b24c1b6 100644 --- a/uncoder-core/app/translator/platforms/fireeye_helix/__init__.py +++ b/uncoder-core/app/translator/platforms/fireeye_helix/__init__.py @@ -1 +1 @@ -from app.translator.platforms.fireeye_helix.renders.fireeye_helix_cti import FireeyeHelixCTI +from app.translator.platforms.fireeye_helix.renders.fireeye_helix_cti import FireeyeHelixCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/forti_siem/__init__.py b/uncoder-core/app/translator/platforms/forti_siem/__init__.py index 479c80ef..11aaf8b6 100644 --- a/uncoder-core/app/translator/platforms/forti_siem/__init__.py +++ b/uncoder-core/app/translator/platforms/forti_siem/__init__.py @@ -1 +1 @@ -from app.translator.platforms.forti_siem.renders.forti_siem_rule import FortiSiemRuleRender +from app.translator.platforms.forti_siem.renders.forti_siem_rule import FortiSiemRuleRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/graylog/__init__.py b/uncoder-core/app/translator/platforms/graylog/__init__.py index d0b256ae..9af149ad 100644 --- a/uncoder-core/app/translator/platforms/graylog/__init__.py +++ b/uncoder-core/app/translator/platforms/graylog/__init__.py @@ -1,3 +1,3 @@ -from app.translator.platforms.graylog.parsers.graylog import GraylogQueryParser -from app.translator.platforms.graylog.renders.graylog import GraylogQueryRender -from app.translator.platforms.graylog.renders.graylog_cti import GraylogCTI +from app.translator.platforms.graylog.parsers.graylog import GraylogQueryParser # noqa: F401 +from app.translator.platforms.graylog.renders.graylog import GraylogQueryRender # noqa: F401 +from app.translator.platforms.graylog.renders.graylog_cti import GraylogCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/logpoint/__init__.py b/uncoder-core/app/translator/platforms/logpoint/__init__.py index e8437684..743bd5e2 100644 --- a/uncoder-core/app/translator/platforms/logpoint/__init__.py +++ b/uncoder-core/app/translator/platforms/logpoint/__init__.py @@ -1 +1 @@ -from app.translator.platforms.logpoint.renders.logpoint_cti import LogpointCTI +from app.translator.platforms.logpoint.renders.logpoint_cti import LogpointCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/logrhythm_axon/__init__.py b/uncoder-core/app/translator/platforms/logrhythm_axon/__init__.py index 2ec2c0fb..1f03d01a 100644 --- a/uncoder-core/app/translator/platforms/logrhythm_axon/__init__.py +++ b/uncoder-core/app/translator/platforms/logrhythm_axon/__init__.py @@ -1,2 +1,2 @@ -from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_query import LogRhythmAxonQueryRender -from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_rule import LogRhythmAxonRuleRender +from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_query import LogRhythmAxonQueryRender # noqa: F401 +from 
app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_rule import LogRhythmAxonRuleRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/logscale/__init__.py b/uncoder-core/app/translator/platforms/logscale/__init__.py index 7e988758..54f3f3db 100644 --- a/uncoder-core/app/translator/platforms/logscale/__init__.py +++ b/uncoder-core/app/translator/platforms/logscale/__init__.py @@ -1,5 +1,5 @@ -from app.translator.platforms.logscale.parsers.logscale import LogScaleQueryParser -from app.translator.platforms.logscale.parsers.logscale_alert import LogScaleAlertParser -from app.translator.platforms.logscale.renders.logscale import LogScaleQueryRender -from app.translator.platforms.logscale.renders.logscale_alert import LogScaleAlertRender -from app.translator.platforms.logscale.renders.logscale_cti import LogScaleCTI +from app.translator.platforms.logscale.parsers.logscale import LogScaleQueryParser # noqa: F401 +from app.translator.platforms.logscale.parsers.logscale_alert import LogScaleAlertParser # noqa: F401 +from app.translator.platforms.logscale.renders.logscale import LogScaleQueryRender # noqa: F401 +from app.translator.platforms.logscale.renders.logscale_alert import LogScaleAlertRender # noqa: F401 +from app.translator.platforms.logscale.renders.logscale_cti import LogScaleCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/microsoft/__init__.py b/uncoder-core/app/translator/platforms/microsoft/__init__.py index 1f85387e..623fe77a 100644 --- a/uncoder-core/app/translator/platforms/microsoft/__init__.py +++ b/uncoder-core/app/translator/platforms/microsoft/__init__.py @@ -1,8 +1,8 @@ -from app.translator.platforms.microsoft.parsers.microsoft_defender import MicrosoftDefenderQueryParser -from app.translator.platforms.microsoft.parsers.microsoft_sentinel import MicrosoftSentinelQueryParser -from app.translator.platforms.microsoft.parsers.microsoft_sentinel_rule import MicrosoftSentinelRuleParser -from app.translator.platforms.microsoft.renders.microsoft_defender import MicrosoftDefenderQueryRender -from app.translator.platforms.microsoft.renders.microsoft_defender_cti import MicrosoftDefenderCTI -from app.translator.platforms.microsoft.renders.microsoft_sentinel import MicrosoftSentinelQueryRender -from app.translator.platforms.microsoft.renders.microsoft_sentinel_cti import MicrosoftSentinelCTI -from app.translator.platforms.microsoft.renders.microsoft_sentinel_rule import MicrosoftSentinelRuleRender +from app.translator.platforms.microsoft.parsers.microsoft_defender import MicrosoftDefenderQueryParser # noqa: F401 +from app.translator.platforms.microsoft.parsers.microsoft_sentinel import MicrosoftSentinelQueryParser # noqa: F401 +from app.translator.platforms.microsoft.parsers.microsoft_sentinel_rule import MicrosoftSentinelRuleParser # noqa: F401 +from app.translator.platforms.microsoft.renders.microsoft_defender import MicrosoftDefenderQueryRender # noqa: F401 +from app.translator.platforms.microsoft.renders.microsoft_defender_cti import MicrosoftDefenderCTI # noqa: F401 +from app.translator.platforms.microsoft.renders.microsoft_sentinel import MicrosoftSentinelQueryRender # noqa: F401 +from app.translator.platforms.microsoft.renders.microsoft_sentinel_cti import MicrosoftSentinelCTI # noqa: F401 +from app.translator.platforms.microsoft.renders.microsoft_sentinel_rule import MicrosoftSentinelRuleRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/opensearch/__init__.py 
b/uncoder-core/app/translator/platforms/opensearch/__init__.py index a46e7a32..d450a659 100644 --- a/uncoder-core/app/translator/platforms/opensearch/__init__.py +++ b/uncoder-core/app/translator/platforms/opensearch/__init__.py @@ -1,4 +1,4 @@ -from app.translator.platforms.opensearch.parsers.opensearch import OpenSearchQueryParser -from app.translator.platforms.opensearch.renders.opensearch import OpenSearchQueryRender -from app.translator.platforms.opensearch.renders.opensearch_cti import OpenSearchCTI -from app.translator.platforms.opensearch.renders.opensearch_rule import OpenSearchRuleRender +from app.translator.platforms.opensearch.parsers.opensearch import OpenSearchQueryParser # noqa: F401 +from app.translator.platforms.opensearch.renders.opensearch import OpenSearchQueryRender # noqa: F401 +from app.translator.platforms.opensearch.renders.opensearch_cti import OpenSearchCTI # noqa: F401 +from app.translator.platforms.opensearch.renders.opensearch_rule import OpenSearchRuleRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/palo_alto/__init__.py b/uncoder-core/app/translator/platforms/palo_alto/__init__.py index 732d1f28..437bfbd7 100644 --- a/uncoder-core/app/translator/platforms/palo_alto/__init__.py +++ b/uncoder-core/app/translator/platforms/palo_alto/__init__.py @@ -1 +1 @@ -from app.translator.platforms.palo_alto.renders.cortex_xsiam import CortexXQLQueryRender +from app.translator.platforms.palo_alto.renders.cortex_xsiam import CortexXQLQueryRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/qradar/__init__.py b/uncoder-core/app/translator/platforms/qradar/__init__.py index 2852a4b1..09ed0612 100644 --- a/uncoder-core/app/translator/platforms/qradar/__init__.py +++ b/uncoder-core/app/translator/platforms/qradar/__init__.py @@ -1,3 +1,3 @@ -from app.translator.platforms.qradar.parsers.qradar import QradarQueryParser -from app.translator.platforms.qradar.renders.qradar import QradarQueryRender -from app.translator.platforms.qradar.renders.qradar_cti import QRadarCTI +from app.translator.platforms.qradar.parsers.qradar import QradarQueryParser # noqa: F401 +from app.translator.platforms.qradar.renders.qradar import QradarQueryRender # noqa: F401 +from app.translator.platforms.qradar.renders.qradar_cti import QRadarCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/qradar/const.py b/uncoder-core/app/translator/platforms/qradar/const.py index 97117029..5143509a 100644 --- a/uncoder-core/app/translator/platforms/qradar/const.py +++ b/uncoder-core/app/translator/platforms/qradar/const.py @@ -1,7 +1,5 @@ from app.translator.core.models.platform_details import PlatformDetails -UTF8_PAYLOAD_PATTERN = r"UTF8\(payload\)" - QRADAR_QUERY_DETAILS = { "platform_id": "qradar-aql-query", "name": "QRadar Query", @@ -10,8 +8,4 @@ "group_name": "QRadar", } -NUM_VALUE_PATTERN = r"(?P\d+(?:\.\d+)*)" -SINGLE_QUOTES_VALUE_PATTERN = r"""'(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')*)'""" - - qradar_query_details = PlatformDetails(**QRADAR_QUERY_DETAILS) diff --git a/uncoder-core/app/translator/platforms/qradar/escape_manager.py b/uncoder-core/app/translator/platforms/qradar/escape_manager.py deleted file mode 100644 index 206cf20e..00000000 --- a/uncoder-core/app/translator/platforms/qradar/escape_manager.py +++ /dev/null @@ -1,8 +0,0 @@ -from app.translator.core.escape_manager import EscapeManager - - -class QradarEscapeManager(EscapeManager): - ... 
- - -qradar_escape_manager = QradarEscapeManager() diff --git a/uncoder-core/app/translator/platforms/qradar/mapping.py b/uncoder-core/app/translator/platforms/qradar/mapping.py deleted file mode 100644 index 2ae3fddf..00000000 --- a/uncoder-core/app/translator/platforms/qradar/mapping.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import Optional - -from app.translator.core.mapping import DEFAULT_MAPPING_NAME, BasePlatformMappings, LogSourceSignature, SourceMapping - - -class QradarLogSourceSignature(LogSourceSignature): - def __init__( - self, - tables: Optional[list[str]], - device_types: Optional[list[int]], - categories: Optional[list[int]], - qids: Optional[list[int]], - qid_event_categories: Optional[list[int]], - default_source: dict, - ): - self.tables = set(tables or []) - self.device_types = set(device_types or []) - self.categories = set(categories or []) - self.qids = set(qids or []) - self.qid_event_categories = set(qid_event_categories or []) - self._default_source = default_source or {} - - def is_suitable( - self, - table: list[str], - devicetype: Optional[list[int]], - category: Optional[list[int]], - qid: Optional[list[int]], - qideventcategory: Optional[list[int]], - ) -> bool: - table_match = set(table).issubset(self.tables) - device_type_match = set(devicetype or []).issubset(self.device_types) - category_match = set(category or []).issubset(self.categories) - qid_match = set(qid or []).issubset(self.qids) - qid_event_category_match = set(qideventcategory or []).issubset(self.qid_event_categories) - - return table_match and device_type_match and category_match and qid_match and qid_event_category_match - - def __str__(self) -> str: - return self._default_source.get("table", "events") - - @property - def extra_condition(self) -> str: - default_source = self._default_source - return " AND ".join((f"{key}={value}" for key, value in default_source.items() if key != "table" and value)) - - -class QradarMappings(BasePlatformMappings): - def prepare_log_source_signature(self, mapping: dict) -> QradarLogSourceSignature: - log_source = mapping.get("log_source", {}) - default_log_source = mapping["default_log_source"] - return QradarLogSourceSignature( - tables=log_source.get("table"), - device_types=log_source.get("devicetype"), - categories=log_source.get("category"), - qids=log_source.get("qid"), - qid_event_categories=log_source.get("qideventcategory"), - default_source=default_log_source, - ) - - def get_suitable_source_mappings( - self, - field_names: list[str], - table: list[str], - devicetype: Optional[list[int]] = None, - category: Optional[list[int]] = None, - qid: Optional[list[int]] = None, - qideventcategory: Optional[list[int]] = None, - ) -> list[SourceMapping]: - suitable_source_mappings = [] - for source_mapping in self._source_mappings.values(): - if source_mapping.source_id == DEFAULT_MAPPING_NAME: - continue - - log_source_signature: QradarLogSourceSignature = source_mapping.log_source_signature - if table and log_source_signature.is_suitable(table, devicetype, category, qid, qideventcategory): - if source_mapping.fields_mapping.is_suitable(field_names): - suitable_source_mappings.append(source_mapping) - elif source_mapping.fields_mapping.is_suitable(field_names): - suitable_source_mappings.append(source_mapping) - - if not suitable_source_mappings: - suitable_source_mappings = [self._source_mappings[DEFAULT_MAPPING_NAME]] - - return suitable_source_mappings - - -qradar_mappings = QradarMappings(platform_dir="qradar") diff --git 
a/uncoder-core/app/translator/platforms/qradar/parsers/qradar.py b/uncoder-core/app/translator/platforms/qradar/parsers/qradar.py index bc153760..c74d3f1f 100644 --- a/uncoder-core/app/translator/platforms/qradar/parsers/qradar.py +++ b/uncoder-core/app/translator/platforms/qradar/parsers/qradar.py @@ -16,105 +16,14 @@ ----------------------------------------------------------------- """ -import re -from typing import Union - from app.translator.core.models.platform_details import PlatformDetails -from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer -from app.translator.core.parser import PlatformQueryParser from app.translator.managers import parser_manager -from app.translator.platforms.qradar.const import NUM_VALUE_PATTERN, SINGLE_QUOTES_VALUE_PATTERN, qradar_query_details -from app.translator.platforms.qradar.mapping import QradarMappings, qradar_mappings -from app.translator.platforms.qradar.tokenizer import QradarTokenizer -from app.translator.tools.utils import get_match_group +from app.translator.platforms.base.aql.parsers.aql import AQLQueryParser +from app.translator.platforms.qradar.const import qradar_query_details @parser_manager.register_supported_by_roota -class QradarQueryParser(PlatformQueryParser): +class QradarQueryParser(AQLQueryParser): details: PlatformDetails = qradar_query_details - tokenizer = QradarTokenizer() - mappings: QradarMappings = qradar_mappings - - log_source_functions = ("LOGSOURCENAME", "LOGSOURCEGROUPNAME", "LOGSOURCETYPENAME", "CATEGORYNAME") - log_source_function_pattern = r"\(?(?P___func_name___\([a-zA-Z]+\))(?:\s+like\s+|\s+ilike\s+|\s*=\s*)'(?P[%a-zA-Z\s]+)'\s*\)?\s+(?:and|or)?\s" # noqa: E501 - - log_source_key_types = ("devicetype", "category", "qid", "qideventcategory") - log_source_pattern = rf"___source_type___(?:\s+like\s+|\s+ilike\s+|\s*=\s*)(?:{SINGLE_QUOTES_VALUE_PATTERN}|{NUM_VALUE_PATTERN})(?:\s+(?:and|or)\s+|\s+)?" # noqa: E501 - num_value_pattern = r"[0-9]+" - multi_num_log_source_pattern = ( - rf"___source_type___\s+in\s+\((?P(?:{num_value_pattern}(?:\s*,\s*)?)+)\)(?:\s+(?:and|or)\s+|\s+)?" 
- ) - str_value_pattern = r"""(?:')(?P(?:[:a-zA-Z\*0-9=+%#\-\/\\,_".$&^@!\(\)\{\}\s]|'')+)(?:')""" - multi_str_log_source_pattern = ( - rf"""___source_type___\s+in\s+\((?P(?:{str_value_pattern}(?:\s*,\s*)?)+)\)(?:\s+(?:and|or)\s+|\s+)?""" - ) - - table_pattern = r"\sFROM\s(?P[a-zA-Z\.\-\*]+)\sWHERE\s" wrapped_with_comment_pattern = r"^\s*/\*(?:|\n|.)*\*/" - - def __clean_query(self, query: str) -> str: - for func_name in self.log_source_functions: - pattern = self.log_source_function_pattern.replace("___func_name___", func_name) - while search := re.search(pattern, query, flags=re.IGNORECASE): - pos_start = search.start() - pos_end = search.end() - query = query[:pos_start] + query[pos_end:] - - return query - - @staticmethod - def __parse_multi_value_log_source( - match: re.Match, query: str, pattern: str - ) -> tuple[str, Union[list[str], list[int]]]: - value = match.group("value") - pos_start = match.start() - pos_end = match.end() - query = query[:pos_start] + query[pos_end:] - return query, re.findall(pattern, value) - - def __parse_log_sources(self, query: str) -> tuple[dict[str, Union[list[str], list[int]]], str]: - log_sources = {} - - if search := re.search(self.table_pattern, query, flags=re.IGNORECASE): - log_sources["table"] = [search.group("table")] - pos_end = search.end() - query = query[pos_end:] - - for log_source_key in self.log_source_key_types: - pattern = self.log_source_pattern.replace("___source_type___", log_source_key) - while search := re.search(pattern, query, flags=re.IGNORECASE): - num_value = get_match_group(search, group_name="num_value") - str_value = get_match_group(search, group_name="s_q_value") - value = num_value and int(num_value) or str_value - log_sources.setdefault(log_source_key, []).append(value) - pos_start = search.start() - pos_end = search.end() - query = query[:pos_start] + query[pos_end:] - - pattern = self.multi_num_log_source_pattern.replace("___source_type___", log_source_key) - if search := re.search(pattern, query, flags=re.IGNORECASE): - query, values = self.__parse_multi_value_log_source(search, query, self.num_value_pattern) - values = [int(v) for v in values] - log_sources.setdefault(log_source_key, []).extend(values) - - pattern = self.multi_str_log_source_pattern.replace("___source_type___", log_source_key) - if search := re.search(pattern, query, flags=re.IGNORECASE): - query, values = self.__parse_multi_value_log_source(search, query, self.str_value_pattern) - log_sources.setdefault(log_source_key, []).extend(values) - - return log_sources, query - - def _parse_query(self, text: str) -> tuple[str, dict[str, Union[list[str], list[int]]]]: - query = self.__clean_query(text) - log_sources, query = self.__parse_log_sources(query) - return query, log_sources - - def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer: - query, log_sources = self._parse_query(raw_query_container.query) - tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources) - fields_tokens = self.get_fields_tokens(tokens=tokens) - meta_info = raw_query_container.meta_info - meta_info.query_fields = fields_tokens - meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings] - return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info) diff --git a/uncoder-core/app/translator/platforms/qradar/renders/qradar.py b/uncoder-core/app/translator/platforms/qradar/renders/qradar.py index 8990b24f..e7c92b76 100644 --- a/uncoder-core/app/translator/platforms/qradar/renders/qradar.py 
+++ b/uncoder-core/app/translator/platforms/qradar/renders/qradar.py @@ -16,113 +16,16 @@ limitations under the License. ----------------------------------------------------------------- """ -from typing import Union - -from app.translator.const import DEFAULT_VALUE_TYPE -from app.translator.core.custom_types.values import ValueType from app.translator.core.models.platform_details import PlatformDetails -from app.translator.core.render import BaseQueryFieldValue, PlatformQueryRender from app.translator.managers import render_manager +from app.translator.platforms.base.aql.renders.aql import AQLFieldValue, AQLQueryRender from app.translator.platforms.qradar.const import qradar_query_details -from app.translator.platforms.qradar.escape_manager import qradar_escape_manager -from app.translator.platforms.qradar.mapping import QradarLogSourceSignature, QradarMappings, qradar_mappings -class QradarFieldValue(BaseQueryFieldValue): +class QradarFieldValue(AQLFieldValue): details: PlatformDetails = qradar_query_details - escape_manager = qradar_escape_manager - - def apply_value(self, value: Union[str, int], value_type: str = ValueType.value) -> Union[str, int]: # noqa: ARG002 - if isinstance(value, str): - value = value.replace("_", "__").replace("%", "%%").replace("\\'", "%").replace("'", '"') - if value.endswith("\\\\%"): - value = value.replace("\\\\%", "\\%") - return value - - def _apply_value(self, value: Union[str, int]) -> Union[str, int]: - if isinstance(value, str) and "\\" in value: - return value - return self.apply_value(value) - - def equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join([self.equal_modifier(field=field, value=v) for v in value])})" - if field == "UTF8(payload)": - return f"UTF8(payload) ILIKE '{self.apply_value(value)}'" - if isinstance(value, int): - return f'"{field}"={value}' - - return f"\"{field}\"='{self._apply_value(value)}'" - - def less_modifier(self, field: str, value: Union[int, str]) -> str: - if isinstance(value, int): - return f'"{field}"<{value}' - return f"\"{field}\"<'{self._apply_value(value)}'" - - def less_or_equal_modifier(self, field: str, value: Union[int, str]) -> str: - if isinstance(value, int): - return f'"{field}"<={value}' - return f"\"{field}\"<='{self._apply_value(value)}'" - - def greater_modifier(self, field: str, value: Union[int, str]) -> str: - if isinstance(value, int): - return f'"{field}">{value}' - return f"\"{field}\">'{self._apply_value(value)}'" - - def greater_or_equal_modifier(self, field: str, value: Union[int, str]) -> str: - if isinstance(value, int): - return f'"{field}">={value}' - return f"\"{field}\">='{self._apply_value(value)}'" - - def not_equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join([self.not_equal_modifier(field=field, value=v) for v in value])})" - if isinstance(value, int): - return f'"{field}"!={value}' - return f"\"{field}\"!='{self._apply_value(value)}'" - - def contains_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join(self.contains_modifier(field=field, value=v) for v in value)})" - return f"\"{field}\" ILIKE '%{self._apply_value(value)}%'" - - def endswith_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join(self.endswith_modifier(field=field, value=v) for v in value)})" - return f"\"{field}\" ILIKE 
'%{self._apply_value(value)}'" - - def startswith_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join(self.startswith_modifier(field=field, value=v) for v in value)})" - return f"\"{field}\" ILIKE '{self._apply_value(value)}%'" - - def regex_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join(self.regex_modifier(field=field, value=v) for v in value)})" - return f"\"{field}\" IMATCHES '{value}'" - - def keywords(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: - if isinstance(value, list): - return f"({self.or_token.join(self.keywords(field=field, value=v) for v in value)})" - return f"UTF8(payload) ILIKE '%{self.apply_value(value)}%'" @render_manager.register -class QradarQueryRender(PlatformQueryRender): +class QradarQueryRender(AQLQueryRender): details: PlatformDetails = qradar_query_details - mappings: QradarMappings = qradar_mappings - - or_token = "OR" - and_token = "AND" - not_token = "NOT" - - field_value_map = QradarFieldValue(or_token=or_token) - query_pattern = "{prefix} AND {query} {functions}" - - def generate_prefix(self, log_source_signature: QradarLogSourceSignature) -> str: - table = str(log_source_signature) - extra_condition = log_source_signature.extra_condition - return f"SELECT UTF8(payload) FROM {table} WHERE {extra_condition}" - - def wrap_with_comment(self, value: str) -> str: - return f"/* {value} */" diff --git a/uncoder-core/app/translator/platforms/qualys/__init__.py b/uncoder-core/app/translator/platforms/qualys/__init__.py index 4e911f79..1e73d6b4 100644 --- a/uncoder-core/app/translator/platforms/qualys/__init__.py +++ b/uncoder-core/app/translator/platforms/qualys/__init__.py @@ -1 +1 @@ -from app.translator.platforms.qualys.renders.qualys_cti import QualysCTI +from app.translator.platforms.qualys.renders.qualys_cti import QualysCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/roota/__init__.py b/uncoder-core/app/translator/platforms/roota/__init__.py index e45d6e93..bf92db6c 100644 --- a/uncoder-core/app/translator/platforms/roota/__init__.py +++ b/uncoder-core/app/translator/platforms/roota/__init__.py @@ -1 +1 @@ -from app.translator.platforms.roota.parsers.roota import RootAParser +from app.translator.platforms.roota.parsers.roota import RootAParser # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/rsa_netwitness/__init__.py b/uncoder-core/app/translator/platforms/rsa_netwitness/__init__.py index 6538d106..1dbbd7ad 100644 --- a/uncoder-core/app/translator/platforms/rsa_netwitness/__init__.py +++ b/uncoder-core/app/translator/platforms/rsa_netwitness/__init__.py @@ -1 +1 @@ -from app.translator.platforms.rsa_netwitness.renders.rsa_netwitness_cti import RSANetwitnessCTI +from app.translator.platforms.rsa_netwitness.renders.rsa_netwitness_cti import RSANetwitnessCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/securonix/__init__.py b/uncoder-core/app/translator/platforms/securonix/__init__.py index 22132ff6..92f8b6af 100644 --- a/uncoder-core/app/translator/platforms/securonix/__init__.py +++ b/uncoder-core/app/translator/platforms/securonix/__init__.py @@ -1 +1 @@ -from app.translator.platforms.securonix.renders.securonix_cti import SecuronixCTI +from app.translator.platforms.securonix.renders.securonix_cti import SecuronixCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/sentinel_one/__init__.py 
b/uncoder-core/app/translator/platforms/sentinel_one/__init__.py index a92c51af..0ba5cbed 100644 --- a/uncoder-core/app/translator/platforms/sentinel_one/__init__.py +++ b/uncoder-core/app/translator/platforms/sentinel_one/__init__.py @@ -1 +1 @@ -from app.translator.platforms.sentinel_one.renders.s1_cti import S1EventsCTI +from app.translator.platforms.sentinel_one.renders.s1_cti import S1EventsCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/sigma/__init__.py b/uncoder-core/app/translator/platforms/sigma/__init__.py index 5109eaa4..488692b8 100644 --- a/uncoder-core/app/translator/platforms/sigma/__init__.py +++ b/uncoder-core/app/translator/platforms/sigma/__init__.py @@ -1,2 +1,2 @@ -from app.translator.platforms.sigma.parsers.sigma import SigmaParser -from app.translator.platforms.sigma.renders.sigma import SigmaRender +from app.translator.platforms.sigma.parsers.sigma import SigmaParser # noqa: F401 +from app.translator.platforms.sigma.renders.sigma import SigmaRender # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/snowflake/__init__.py b/uncoder-core/app/translator/platforms/snowflake/__init__.py index d2d9d6b9..c0eae983 100644 --- a/uncoder-core/app/translator/platforms/snowflake/__init__.py +++ b/uncoder-core/app/translator/platforms/snowflake/__init__.py @@ -1 +1 @@ -from app.translator.platforms.snowflake.renders.snowflake_cti import SnowflakeCTI +from app.translator.platforms.snowflake.renders.snowflake_cti import SnowflakeCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/splunk/__init__.py b/uncoder-core/app/translator/platforms/splunk/__init__.py index 1b78d9e3..01b538f9 100644 --- a/uncoder-core/app/translator/platforms/splunk/__init__.py +++ b/uncoder-core/app/translator/platforms/splunk/__init__.py @@ -1,5 +1,5 @@ -from app.translator.platforms.splunk.parsers.splunk import SplunkQueryParser -from app.translator.platforms.splunk.parsers.splunk_alert import SplunkAlertParser -from app.translator.platforms.splunk.renders.splunk import SplunkQueryRender -from app.translator.platforms.splunk.renders.splunk_alert import SplunkAlertRender -from app.translator.platforms.splunk.renders.splunk_cti import SplunkCTI +from app.translator.platforms.splunk.parsers.splunk import SplunkQueryParser # noqa: F401 +from app.translator.platforms.splunk.parsers.splunk_alert import SplunkAlertParser # noqa: F401 +from app.translator.platforms.splunk.renders.splunk import SplunkQueryRender # noqa: F401 +from app.translator.platforms.splunk.renders.splunk_alert import SplunkAlertRender # noqa: F401 +from app.translator.platforms.splunk.renders.splunk_cti import SplunkCTI # noqa: F401 diff --git a/uncoder-core/app/translator/platforms/sumo_logic/__init__.py b/uncoder-core/app/translator/platforms/sumo_logic/__init__.py index 43d79804..5d25614c 100644 --- a/uncoder-core/app/translator/platforms/sumo_logic/__init__.py +++ b/uncoder-core/app/translator/platforms/sumo_logic/__init__.py @@ -1 +1 @@ -from app.translator.platforms.sumo_logic.renders.sumologic_cti import SumologicCTI +from app.translator.platforms.sumo_logic.renders.sumologic_cti import SumologicCTI # noqa: F401 diff --git a/uncoder-core/requirements.txt b/uncoder-core/requirements.txt index 124d702e..12133e51 100644 --- a/uncoder-core/requirements.txt +++ b/uncoder-core/requirements.txt @@ -5,3 +5,4 @@ PyYAML~=6.0.1 colorama~=0.4.6 ruff==0.1.13 ujson==5.9.0 +xmltodict~=0.13.0 From 87274f85892667547c60d1bd56182b418110fb14 Mon Sep 17 00:00:00 2001 From: Nazar Gesyk Date: Wed, 22 May 2024 
12:52:07 +0300 Subject: [PATCH 2/2] Managers fix --- uncoder-core/app/translator/managers.py | 84 +++++++++++++------------ 1 file changed, 43 insertions(+), 41 deletions(-) diff --git a/uncoder-core/app/translator/managers.py b/uncoder-core/app/translator/managers.py index cf552a5f..38fb1e63 100644 --- a/uncoder-core/app/translator/managers.py +++ b/uncoder-core/app/translator/managers.py @@ -1,21 +1,16 @@ from abc import ABC from functools import cached_property +from typing import ClassVar, Union from app.models.translation import TranslatorPlatform -from app.translator.core.exceptions.core import UnsupportedRootAParser +from app.translator.core.exceptions.core import UnsupportedPlatform, UnsupportedRootAParser +from app.translator.core.parser import QueryParser +from app.translator.core.render import QueryRender +from app.translator.core.render_cti import RenderCTI -class Manager(ABC): - platforms = {} - - def register(self, cls): - self.platforms[cls.details.platform_id] = cls() - return cls - - def get(self, platform_id: str): # noqa: ANN201 - if platform := self.platforms.get(platform_id): - return platform - raise UnsupportedRootAParser(parser=platform_id) +class PlatformManager(ABC): + platforms: ClassVar[dict[str, Union[QueryParser, QueryRender, RenderCTI]]] = {} def all_platforms(self) -> list: return list(self.platforms.keys()) @@ -40,54 +35,61 @@ def get_platforms_details(self) -> list[TranslatorPlatform]: return sorted(platforms, key=lambda platform: platform.group_name) -class ParserManager(Manager): - platforms = {} - supported_by_roota_platforms = {} - main_platforms = {} +class ParserManager(PlatformManager): + supported_by_roota_platforms: ClassVar[dict[str, QueryParser]] = {} + main_platforms: ClassVar[dict[str, QueryParser]] = {} - def get_supported_by_roota(self, platform_id: str): # noqa: ANN201 + def get(self, platform_id: str) -> QueryParser: + if platform := self.platforms.get(platform_id): + return platform + raise UnsupportedPlatform(platform=platform_id, is_parser=True) + + def register(self, cls: type[QueryParser]) -> type[QueryParser]: + self.platforms[cls.details.platform_id] = cls() + return cls + + def get_supported_by_roota(self, platform_id: str) -> QueryParser: if platform := self.supported_by_roota_platforms.get(platform_id): return platform raise UnsupportedRootAParser(parser=platform_id) - def register_supported_by_roota(self, cls): + def register_supported_by_roota(self, cls: type[QueryParser]) -> type[QueryParser]: parser = cls() self.supported_by_roota_platforms[cls.details.platform_id] = parser self.platforms[cls.details.platform_id] = parser return cls - def register_main(self, cls): + def register_main(self, cls: type[QueryParser]) -> type[QueryParser]: parser = cls() self.main_platforms[cls.details.platform_id] = parser self.platforms[cls.details.platform_id] = parser return cls - @cached_property - def get_platforms_details(self) -> list[TranslatorPlatform]: - platforms = [ - TranslatorPlatform( - id=platform.details.platform_id, - name=platform.details.name, - code=platform.details.platform_id, - group_name=platform.details.group_name, - group_id=platform.details.group_id, - platform_name=platform.details.platform_name, - platform_id=platform.details.platform_id, - alt_platform_name=platform.details.alt_platform_name, - alt_platform=platform.details.alt_platform, - first_choice=platform.details.first_choice, - ) - for platform in self.platforms.values() - ] - return sorted(platforms, key=lambda platform: platform.group_name) +class 
RenderManager(PlatformManager): + platforms: ClassVar[dict[str, QueryRender]] = {} + + def get(self, platform_id: str) -> QueryRender: + if platform := self.platforms.get(platform_id): + return platform + raise UnsupportedPlatform(platform=platform_id) + + def register(self, cls: type[QueryRender]) -> type[QueryRender]: + self.platforms[cls.details.platform_id] = cls() + return cls -class RenderManager(Manager): - platforms = {} +class RenderCTIManager(PlatformManager): + platforms: ClassVar[dict[str, RenderCTI]] = {} -class RenderCTIManager(Manager): - platforms = {} + def get(self, platform_id: str) -> RenderCTI: + if platform := self.platforms.get(platform_id): + return platform + raise UnsupportedPlatform(platform=platform_id) + + def register(self, cls: type[RenderCTI]) -> type[RenderCTI]: + self.platforms[cls.details.platform_id] = cls() + return cls parser_manager = ParserManager()
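
With this refactor, an AQL-flavoured platform only has to supply its own PlatformDetails on top of the shared base classes; the QRadar render above does exactly that. A minimal sketch of such a subclass, assuming a hypothetical "example-aql" platform (the identifiers below are illustrative and not part of the patch):

    # Hypothetical AQL-based platform reusing the shared base classes from this patch.
    # Only the platform metadata is new; tokens, the query pattern and value escaping
    # are inherited from AQLQueryRender / AQLFieldValue.
    from app.translator.core.models.platform_details import PlatformDetails
    from app.translator.managers import render_manager
    from app.translator.platforms.base.aql.renders.aql import AQLFieldValue, AQLQueryRender

    example_aql_query_details = PlatformDetails(
        platform_id="example-aql-query",  # illustrative id, not a real platform
        name="Example AQL Query",
        group_name="Example",
        group_id="example",
        platform_name="Query",
    )


    class ExampleAQLFieldValue(AQLFieldValue):
        details: PlatformDetails = example_aql_query_details


    @render_manager.register
    class ExampleAQLQueryRender(AQLQueryRender):
        details: PlatformDetails = example_aql_query_details
        field_value_map = ExampleAQLFieldValue(or_token=AQLQueryRender.or_token)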
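
The second commit also tightens platform lookup: each manager now owns a typed get/register pair, so an unknown platform id raises UnsupportedPlatform instead of the RootA-specific error. A short usage sketch; the lookup calls below are an assumed usage pattern, not shown in the diff:

    from app.translator.managers import parser_manager, render_manager

    # Both ids come from qradar_query_details in this patch.
    qradar_parser = parser_manager.get("qradar-aql-query")  # returns a QueryParser instance
    qradar_render = render_manager.get("qradar-aql-query")  # returns a QueryRender instance

    # Unknown ids now raise UnsupportedPlatform; ParserManager.get passes is_parser=True,
    # while RenderManager.get and RenderCTIManager.get raise it without that flag.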