diff --git a/translator/app/translator/core/models/parser_output.py b/translator/app/translator/core/models/parser_output.py index 87b4b973..a17dbc43 100644 --- a/translator/app/translator/core/models/parser_output.py +++ b/translator/app/translator/core/models/parser_output.py @@ -5,6 +5,7 @@ from app.translator.core.custom_types.meta_info import SeverityType from app.translator.core.mapping import DEFAULT_MAPPING_NAME +from app.translator.core.models.field import Field from app.translator.core.models.functions.base import ParsedFunctions @@ -17,6 +18,7 @@ def __init__( description: Optional[str] = None, author: Optional[str] = None, date: Optional[str] = None, + fields: Optional[list[Field]] = None, license_: Optional[str] = None, severity: Optional[str] = None, references: Optional[list[str]] = None, @@ -25,6 +27,7 @@ def __init__( status: Optional[str] = None, false_positives: Optional[list[str]] = None, source_mapping_ids: Optional[list[str]] = None, + parsed_logsources: Optional[dict] = None ) -> None: self.id = id_ or str(uuid.uuid4()) self.title = title or "" @@ -32,6 +35,7 @@ def __init__( self.author = author or "" self.date = date or datetime.now().date().strftime("%Y-%m-%d") self.license = license_ or "DRL 1.1" + self.fields = fields or [] self.severity = severity or SeverityType.low self.references = references or [] self.tags = tags or [] @@ -39,6 +43,7 @@ def __init__( self.status = status or "stable" self.false_positives = false_positives or [] self.source_mapping_ids = source_mapping_ids or [DEFAULT_MAPPING_NAME] + self.parsed_logsources = parsed_logsources or {} @dataclass diff --git a/translator/app/translator/mappings/platforms/logrhythm_axon/default.yml b/translator/app/translator/mappings/platforms/logrhythm_axon/default.yml new file mode 100644 index 00000000..6dbfd843 --- /dev/null +++ b/translator/app/translator/mappings/platforms/logrhythm_axon/default.yml @@ -0,0 +1,304 @@ +platform: LogRhythm Axon +source: default +description: Text that 
describe current mapping + +field_mapping: + EventID: vendor_information.id + Channel: general_information.log_source.type_name + ComputerName: origin.host.name + FileName: object.file.name + ProcessId: object.process.id + Image: object.process.name + AccountEmail: unattributed.account.email_address + ContextInfo: general_information.raw_message + CurrentDirectory: object.process.path + ParentProcessId: object.process.parent_process.id + ParentImage: object.process.parent_process.path + ParentCommandLine: object.process.parent_process.command_line + TargetFilename: object.file.name + SourceIp: origin.host.ip_address.value + SourceHostname: origin.host.name + SourcePort: origin.host.network_port.value + DestinationIp: target.host.ip_address.value + DestinationHostname: + - target.host.name + - target.host.domain + DestinationPort: target.host.network_port.value + DestinationPortName: action.network.protocol.name + ImageLoaded: object.file.path + SignatureStatus: object.process.signature.status + SourceProcessId: object.process.id + SourceImage: object.process.name + Device: object.process.path + Destination: object.process.name + QueryName: action.dns.query + QueryStatus: action.dns.result + CommandName: object.process.command_line + CommandPath: object.process.path + HostApplication: object.script.command_line + HostName: origin.host.name + ScriptName: object.script.name + ScriptBlockText: object.script.command_line + ScriptBlockId: object.script.id + Application: object.process.name + ClientAddress: origin.host.ip_address.value + ClientName: origin.host.domain.name + DestAddress: target.host.ip_address.value + DestPort: target.host.network_port.value + IpAddress: origin.host.ip_address.value + IpPort: origin.host.network_port.value + NewProcessId: object.process.id + NewProcessName: object.process.name + ParentProcessName: object.process.parent_process.name + ProcessName: object.process.name + SourceAddress: origin.host.ip_address.value + WorkstationName: 
origin.host.name + destination.port: target.host.network_port.value + dst: target.host.ip_address.value + dst_ip: target.host.ip_address.value + dst_port: target.host.network_port.value + network_application: + - action.network.protocol.name + - object.url.protocol + network_protocol: action.network.protocol.name + proto: action.network.protocol.name + src: origin.host.ip_address.value + src_ip: origin.host.ip_address.value + src_port: origin.host.network_port.value + action: action.command + mqtt_action: action.command + smb_action: action.command + tunnel_action: action.command + arg: object.process.command_args + ftp_arg: object.process.command_args + mysql_arg: object.process.command_args + pop3_arg: object.process.command_args + client: origin.host.ip_address.value + command: action.command + ftp_command: action.command + irc_command: action.command + pop3_command: action.command + duration: action.duration + from: origin.account.email_address + kerberos_from: origin.account.email_address + smtp_from: origin.account.email_address + method: action.network.http_method + http_method: action.network.http_method + sip_method: action.network.http_method + name: object.file.name + smb_files_name: object.file.name + software_name: object.file.name + weird_name: object.file.name + path: object.file.path + smb_mapping_path: object.file.path + smb_files_path: object.file.path + smtp_files_path: object.file.path + password: object.file.name + reply_to: target.account.email_address + response_body_len: action.network.byte_information.received + request_body_len: action.network.byte_information.sent + rtt: action.duration + status_code: action.result.code + known_certs_subject: object.certificate.subject + sip_subject: object.email_message.subject + smtp_subject: object.email_message.subject + ssl_subject: object.certificate.subject + username: origin.account.name + uri: object.url.path + user: origin.account.name + user_agent: action.user_agent + http_user_agent: 
action.user_agent + gquic_user_agent: action.user_agent + sip_user_agent: action.user_agent + smtp_user_agent: action.user_agent + version: object.file.version + gquic_version: object.file.version + http_version: object.file.version + ntp_version: object.file.version + socks_version: object.file.version + snmp_version: object.file.version + ssh_version: object.file.version + tls_version: object.file.version + answer: action.dns.result + question_length: action.network.byte_information.total + record_type: action.dns.record_type + parent_domain: target.host.domain + cs-bytes: action.network.byte_information.received + r-dns: target.host.domain + sc-bytes: action.network.byte_information.received + sc-status: action.result.code + c-uri: object.url.complete + c-uri-extension: object.url.type + c-uri-query: object.url.query + c-uri-stem: object.url.complete + c-useragent: action.user_agent + cs-host: + - target.host.name + - target.host.domain + cs-method: action.network.http_method + cs-version: object.file.version + uid: action.session.id + endpoint: origin.host.name + domain: target.host.domain + host_name: target.host.name + client_fqdn: origin.host.name + requested_addr: target.host.ip_address.value + server_addr: target.host.ip_address.value + qtype: action.dns.record_type + qtype_name: action.dns.record_type + query: action.dns.query + rcode_name: action.dns.result + md5: unattributed.hash.md5 + sha1: unattributed.hash.sha1 + sha256: unattributed.hash.sha256 + sha512: unattributed.hash.sha512 + filename: object.file.name + host: + - unattributed.host.name + - unattributed.host.ip_address.value + domainname: unattributed.host.name + hostname: unattributed.host.name + server_nb_computer_name: unattributed.host.name + server_tree_name: unattributed.host.name + server_dns_computer_name: unattributed.host.name + machine: unattributed.host.name + os: origin.host.os.platform + mac: unattributed.host.mac_address + result: + - action.result.message + - action.result.code 
+ - action.result.reason + mailfrom: origin.account.email_address + rcptto: target.account.email_address + second_received: target.account.email_address + server_name: unattributed.host.name + c-ip: origin.host.ip_address.value + cs-uri: object.url.path + cs-uri-query: object.url.query + cs-uri-stem: object.url.complete + clientip: origin.host.ip_address.value + clientIP: origin.host.ip_address.value + dest_domain: + - target.host.name + - target.host.domain + dest_ip: target.host.ip_address.value + dest_port: target.host.network_port.value + agent.version: object.file.version + destination.hostname: + - target.host.name + - target.host.domain + DestinationAddress: + - target.host.name + - target.host.domain + - target.host.ip_address.value + DestinationIP: target.host.ip_address.value + dst-ip: target.host.ip_address.value + dstip: target.host.ip_address.value + dstport: target.host.ip_address.value + Host: target.host.name + HostVersion: object.file.version + http_host: + - target.host.name + - target.host.domain + - target.host.ip_address.value + http_uri: object.url.path + http_url: object.url.complete + http.request.url-query-params: object.url.query + HttpMethod: action.network.http_method + in_url: object.url.path + post_url_parameter: object.url.path + Request_Url: object.url.complete + request_url: object.url.complete + request_URL: object.url.complete + RequestUrl: object.url.complete + resource.url: object.url.path + resource.URL: object.url.path + sc_status: action.result.code + sender_domain: + - target.host.name + - target.host.domain + service.response_code: action.result.code + source: + - origin.host.name + - origin.host.domain.name + - origin.host.ip_address.value + SourceAddr: origin.host.ip_address.value + SourceIP: origin.host.ip_address.value + SourceNetworkAddress: origin.host.ip_address.value + srcip: origin.host.ip_address.value + Status: action.result.code + status: action.result.code + url: object.url.path + URL: object.url.path + 
url_query: object.url.query + url.query: object.url.query + uri_path: object.url.path + user_agent.name: action.user_agent + user-agent: action.user_agent + User-Agent: action.user_agent + useragent: action.user_agent + UserAgent: action.user_agent + User_Agent: action.user_agent + web_dest: + - target.host.name + - target.host.domain + - target.host.ip_address.value + - object.url.domain + web.dest: + - target.host.name + - target.host.domain + - target.host.ip_address.value + - object.url.domain + Web.dest: + - target.host.name + - target.host.domain + - target.host.ip_address.value + - object.url.domain + web.host: + - target.host.name + - target.host.domain + - target.host.ip_address.value + - object.url.domain + Web.host: + - target.host.name + - target.host.domain + - target.host.ip_address.value + - object.url.domain + web_method: action.network.http_method + Web_method: action.network.http_method + web.method: action.network.http_method + Web.method: action.network.http_method + web_src: origin.host.ip_address.value + web_status: action.result.code + Web_status: action.result.code + web.status: action.result.code + Web.status: action.result.code + web_uri: object.url.path + web_url: object.url.complete + destination.ip: target.host.ip_address.value + source.ip: origin.host.ip_address.value + source.port: origin.host.ip_address.value + Computer: + - target.host.name + - target.host.domain + - target.host.ip_address.value + OriginalFileName: object.file.name + User: origin.account.name + EventType: action.command + TargetObject: + - object.registry_object.key + - object.registry_object.path + - object.resource.name + CommandLine: object.process.command_line + type: + - action.command + - action.type + - action.session.type + a0: + - object.process.command_line + - object.process.command_args + - object.process.name + cs-user-agent: action.user_agent + blocked: + - action.message + - action.result.reason \ No newline at end of file diff --git 
a/translator/app/translator/platforms/__init__.py b/translator/app/translator/platforms/__init__.py index 6b344bad..af92d5be 100644 --- a/translator/app/translator/platforms/__init__.py +++ b/translator/app/translator/platforms/__init__.py @@ -24,6 +24,8 @@ from app.translator.platforms.graylog.renders.graylog import GraylogRender from app.translator.platforms.graylog.renders.graylog_cti import GraylogCTI from app.translator.platforms.logpoint.renders.logpoint_cti import LogpointCTI +from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_query import LogRhythmAxonQueryRender +from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_rule import LogRhythmAxonRuleRender from app.translator.platforms.logscale.parsers.logscale import LogScaleParser from app.translator.platforms.logscale.parsers.logscale_alert import LogScaleAlertParser from app.translator.platforms.logscale.renders.logscale_cti import LogScaleCTI @@ -71,6 +73,8 @@ ChronicleSecurityRuleRender(), AthenaQueryRender(), ElasticSearchQueryRender(), + LogRhythmAxonQueryRender(), + LogRhythmAxonRuleRender(), LogScaleQueryRender(), LogScaleAlertRender(), ElasticSearchRuleRender(), diff --git a/translator/app/translator/platforms/logrhythm_axon/__init__.py b/translator/app/translator/platforms/logrhythm_axon/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/translator/app/translator/platforms/logrhythm_axon/const.py b/translator/app/translator/platforms/logrhythm_axon/const.py new file mode 100644 index 00000000..406cb146 --- /dev/null +++ b/translator/app/translator/platforms/logrhythm_axon/const.py @@ -0,0 +1,45 @@ +from app.translator.core.custom_types.meta_info import SeverityType +from app.translator.core.models.platform_details import PlatformDetails + +DEFAULT_LOGRHYTHM_AXON_RULE = { + "title": "Default LogRhythm Axon rule", + "version": 3, + "description": "Default LogRhythm Axon rule description.", + "observationPipeline": { + "pattern": { + "operations": [ + 
{ + "touched": True, + "blockType": "LOG_OBSERVED", + "logObserved": {"filter": "query", "groupByFields": []}, + "operationType": "WHERE_PATTERN_OPERATION", + "isOutOfBoxRule": False, + "ruleElementKey": "rule_id", + } + ], + "afterMatchSkipStrategy": "SKIP_PAST_LAST_EVENT", + }, + "commonEvents": ["28de4ee0-ca58-40f5-9ac7-ca38edf7883a", "348a37e6-590e-4767-baae-a5c3951391ae"], + "metadataFields": {"threat.severity": SeverityType.medium}, + }, +} + +PLATFORM_DETAILS = {"group_id": "axon-ads", "group_name": "LogRhythm Axon"} + +LOGRHYTHM_AXON_QUERY_DETAILS = { + "siem_type": "axon-ads-query", + "name": "LogRhythm Axon Query", + "platform_name": "Query", + **PLATFORM_DETAILS, +} + +LOGRHYTHM_AXON_RULE_DETAILS = { + "siem_type": "axon-ads-rule", + "name": "LogRhythm Axon Rule", + "platform_name": "Rule", + "first_choice": 0, + **PLATFORM_DETAILS, +} + +logrhythm_axon_query_details = PlatformDetails(**LOGRHYTHM_AXON_QUERY_DETAILS) +logrhythm_axon_rule_details = PlatformDetails(**LOGRHYTHM_AXON_RULE_DETAILS) diff --git a/translator/app/translator/platforms/logrhythm_axon/mapping.py b/translator/app/translator/platforms/logrhythm_axon/mapping.py new file mode 100644 index 00000000..debf3e1f --- /dev/null +++ b/translator/app/translator/platforms/logrhythm_axon/mapping.py @@ -0,0 +1,47 @@ +from typing import Optional + +from app.translator.core.mapping import DEFAULT_MAPPING_NAME, BasePlatformMappings, LogSourceSignature, SourceMapping + + +class LogRhythmAxonLogSourceSignature(LogSourceSignature): + def __init__(self, default_source: Optional[dict] = None): + self._default_source = default_source or {} + + def is_suitable(self) -> bool: + return True + + def __str__(self) -> str: + return "general_information.log_source.type_name" + + +class LogRhythmAxonMappings(BasePlatformMappings): + def prepare_mapping(self) -> dict[str, SourceMapping]: + source_mappings = {} + for mapping_dict in self._loader.load_siem_mappings(self._platform_dir): + log_source_signature = 
self.prepare_log_source_signature(mapping=mapping_dict) + fields_mapping = self.prepare_fields_mapping(field_mapping=mapping_dict.get("field_mapping", {})) + source_mappings[DEFAULT_MAPPING_NAME] = SourceMapping( + source_id=DEFAULT_MAPPING_NAME, log_source_signature=log_source_signature, fields_mapping=fields_mapping + ) + return source_mappings + + def prepare_log_source_signature(self, mapping: dict) -> LogRhythmAxonLogSourceSignature: + default_log_source = mapping.get("default_log_source") + return LogRhythmAxonLogSourceSignature(default_source=default_log_source) + + def get_suitable_source_mappings(self, field_names: list[str]) -> list[SourceMapping]: + suitable_source_mappings = [] + for source_mapping in self._source_mappings.values(): + if source_mapping.source_id == DEFAULT_MAPPING_NAME: + continue + + if source_mapping.fields_mapping.is_suitable(field_names): + suitable_source_mappings.append(source_mapping) + + if not suitable_source_mappings: + suitable_source_mappings = [self._source_mappings[DEFAULT_MAPPING_NAME]] + + return suitable_source_mappings + + +logrhythm_axon_mappings = LogRhythmAxonMappings(platform_dir="logrhythm_axon") diff --git a/translator/app/translator/platforms/logrhythm_axon/renders/__init__.py b/translator/app/translator/platforms/logrhythm_axon/renders/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/translator/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py b/translator/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py new file mode 100644 index 00000000..ad9703ef --- /dev/null +++ b/translator/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py @@ -0,0 +1,246 @@ +""" +Uncoder IO Community Edition License +----------------------------------------------------------------- +Copyright (c) 2023 SOC Prime, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +----------------------------------------------------------------- +""" +from typing import Union + +from app.translator.const import DEFAULT_VALUE_TYPE +from app.translator.core.custom_types.tokens import LogicalOperatorType, OperatorType +from app.translator.core.exceptions.core import StrictPlatformException +from app.translator.core.exceptions.render import BaseRenderException +from app.translator.core.mapping import LogSourceSignature, SourceMapping +from app.translator.core.models.field import FieldValue, Keyword +from app.translator.core.models.functions.base import ParsedFunctions +from app.translator.core.models.identifier import Identifier +from app.translator.core.models.parser_output import MetaInfoContainer +from app.translator.core.models.platform_details import PlatformDetails +from app.translator.core.render import BaseQueryFieldValue, BaseQueryRender +from app.translator.platforms.logrhythm_axon.const import logrhythm_axon_query_details +from app.translator.platforms.logrhythm_axon.mapping import LogRhythmAxonMappings, logrhythm_axon_mappings +from app.translator.platforms.microsoft.escape_manager import microsoft_escape_manager + + +class LogRhythmRegexRenderException(BaseRenderException): + ... 
+ + +class LogRhythmAxonFieldValue(BaseQueryFieldValue): + details: PlatformDetails = logrhythm_axon_query_details + escape_manager = microsoft_escape_manager + + def __is_complex_regex(self, regex: str) -> bool: + regex_items = ("[", "]", "(", ")", "{", "}", "+", "?", "^", "$", "\\d", "\\w", "\\s", "-") + return any(v in regex for v in regex_items) + + def __is_contain_regex_items(self, value: str) -> bool: + regex_items = ("[", "]", "(", ")", "{", "}", "*", "+", "?", "^", "$", "|", ".", "\\d", "\\w", "\\s", "\\", "-") + return any(v in value for v in regex_items) + + def __regex_to_str_list(self, value: Union[int, str]) -> list[list[str]]: # noqa: PLR0912 + value_groups = [] + + stack = [] # [(element: str, escaped: bool)] + + for char in value: + if char == "\\": + if stack and stack[-1][0] == "\\" and stack[-1][1] is False: + stack.pop() + stack.append((char, True)) + else: + stack.append(("\\", False)) + elif char == "|": + if stack and stack[-1][0] == "\\" and stack[-1][1] is False: + stack.pop() + stack.append((char, True)) + elif stack: + value_groups.append("".join(element[0] for element in stack)) + stack = [] + else: + stack.append((char, False)) + if stack: + value_groups.append("".join(element[0] for element in stack if element[0] != "\\" or element[-1] is True)) + + joined_components = [] + for value_group in value_groups: + inner_joined_components = [] + not_joined_components = [] + for i in range(len(value_group)): + if value_group[i] == "*" and i > 0 and value_group[i - 1] != "\\": + inner_joined_components.append("".join(not_joined_components)) + not_joined_components = [] + else: + not_joined_components.append(value_group[i]) + if not_joined_components: + inner_joined_components.append("".join(not_joined_components)) + joined_components.append(inner_joined_components) + + return joined_components + + @staticmethod + def __escape_value(value: Union[int, str]) -> Union[int, str]: + return value.replace("'", "''") if isinstance(value, str) else 
value + + def equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, str): + return f'{field} = "{self.__escape_value(value)}"' + if isinstance(value, list): + prepared_values = ", ".join(f"{self.__escape_value(v)}" for v in value) + operator = "IN" if all(isinstance(v, str) for v in value) else "in" + return f"{field} {operator} [{prepared_values}]" + return f'{field} = "{self.apply_value(value)}"' + + def less_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f"{field} < {value}" + return f"{field} < '{self.apply_value(value)}'" + + def less_or_equal_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f"{field} <= {value}" + return f"{field} <= {self.apply_value(value)}" + + def greater_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f"{field} > {value}" + return f"{field} > {self.apply_value(value)}" + + def greater_or_equal_modifier(self, field: str, value: Union[int, str]) -> str: + if isinstance(value, int): + return f"{field} >= {value}" + return f"{field} >= {self.apply_value(value)}" + + def not_equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join([self.not_equal_modifier(field=field, value=v) for v in value])})" + if isinstance(value, int): + return f"{field} != {value}" + return f"{field} != {self.apply_value(value)}" + + def contains_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.contains_modifier(field=field, value=v) for v in value)})" + if isinstance(value, str) and self.__is_contain_regex_items(value): + if self.__is_complex_regex(value): + raise LogRhythmRegexRenderException + values = self.__regex_to_str_list(value) + return ( + "(" + + self.or_token.join( + " AND ".join(f'{field} CONTAINS 
"{self.__escape_value(value)}"' for value in value_list) + for value_list in values + ) + + ")" + ) + return f'{field} CONTAINS "{self.__escape_value(value)}"' + + def endswith_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.endswith_modifier(field=field, value=v) for v in value)})" + value = f".*{self.__escape_value(value)}" if not value.startswith(".*") else self.__escape_value(value) + return f'{field} matches "{value}$"' + + def startswith_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.startswith_modifier(field=field, value=v) for v in value)})" + value = f"{self.__escape_value(value)}.*" if not value.endswith(".*") else self.__escape_value(value) + return f'{field} matches "^{self.__escape_value(value)}"' + + def __regex_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + return f'{field} matches "{value}"' + + def regex_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: + if isinstance(value, list): + return f"({self.or_token.join(self.__regex_modifier(field=field, value=v) for v in value)})" + return self.__regex_modifier(field, value) + + +class LogRhythmAxonQueryRender(BaseQueryRender): + details: PlatformDetails = logrhythm_axon_query_details + + or_token = "OR" + and_token = "AND" + not_token = "NOT" + + field_value_map = LogRhythmAxonFieldValue(or_token=or_token) + query_pattern = "{prefix} AND {query}" + + mappings: LogRhythmAxonMappings = logrhythm_axon_mappings + comment_symbol = "//" + is_multi_line_comment = True + is_strict_mapping = True + + def generate_prefix(self, log_source_signature: LogSourceSignature) -> str: + return str(log_source_signature) + + def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapping: SourceMapping) -> str: + if isinstance(token, FieldValue): + try: + mapped_fields = self.map_field(token.field, source_mapping) 
+ except StrictPlatformException: + try: + return self.field_value_map.apply_field_value( + field="general_information.raw_message", + operator=Identifier(token_type=OperatorType.CONTAINS), + value=token.value, + ) + except LogRhythmRegexRenderException as exc: + raise LogRhythmRegexRenderException( + f"Uncoder does not support complex regexp for unmapped field:" + f" {token.field.source_name} for LogRhythm Axon" + ) from exc + if len(mapped_fields) > 1: + return self.group_token % self.operator_map[LogicalOperatorType.OR].join( + [ + self.field_value_map.apply_field_value(field=field, operator=token.operator, value=token.value) + for field in mapped_fields + ] + ) + return self.field_value_map.apply_field_value( + field=mapped_fields[0], operator=token.operator, value=token.value + ) + + if isinstance(token, Keyword): + return self.field_value_map.apply_field_value(field=None, operator=token.operator, value=token.value) + if token.token_type in LogicalOperatorType: + return self.operator_map.get(token.token_type) + + return token.token_type + + def generate(self, query: list, meta_info: MetaInfoContainer, functions: ParsedFunctions) -> str: + queries_map = {} + source_mappings = self._get_source_mappings(meta_info.source_mapping_ids) + + for source_mapping in source_mappings: + prefix = self.generate_prefix(source_mapping.log_source_signature) + if "product" in meta_info.parsed_logsources: + prefix = f"{prefix} CONTAINS {meta_info.parsed_logsources['product'][0]}" + else: + prefix = f"{prefix} CONTAINS anything" + + result = self.generate_query(query=query, source_mapping=source_mapping) + + finalized_query = self.finalize_query( + prefix=prefix, + query=result, + functions=self.generate_functions(functions.functions, source_mapping), + not_supported_functions=functions.not_supported, + meta_info=meta_info, + source_mapping=source_mapping, + ) + queries_map[source_mapping.source_id] = finalized_query + + return self.finalize(queries_map) diff --git 
a/translator/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_rule.py b/translator/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_rule.py new file mode 100644 index 00000000..014c9ebb --- /dev/null +++ b/translator/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_rule.py @@ -0,0 +1,108 @@ +""" +Uncoder IO Community Edition License +----------------------------------------------------------------- +Copyright (c) 2023 SOC Prime, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+----------------------------------------------------------------- +""" +import copy +import json +from typing import Optional + +from app.translator.core.custom_types.meta_info import SeverityType +from app.translator.core.mapping import SourceMapping +from app.translator.core.models.parser_output import MetaInfoContainer +from app.translator.core.models.platform_details import PlatformDetails +from app.translator.platforms.logrhythm_axon.const import DEFAULT_LOGRHYTHM_AXON_RULE, logrhythm_axon_rule_details +from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_query import ( + LogRhythmAxonFieldValue, + LogRhythmAxonQueryRender, +) +from app.translator.tools.utils import get_rule_description_str + +_SEVERITIES_MAP = { + SeverityType.critical: SeverityType.critical, + SeverityType.high: SeverityType.high, + SeverityType.medium: SeverityType.medium, + SeverityType.low: SeverityType.low, + SeverityType.informational: SeverityType.low, +} + + +class LogRhythmAxonRuleFieldValue(LogRhythmAxonFieldValue): + details: PlatformDetails = logrhythm_axon_rule_details + + +class LogRhythmAxonRuleRender(LogRhythmAxonQueryRender): + details: PlatformDetails = logrhythm_axon_rule_details + or_token = "or" + field_value_map = LogRhythmAxonRuleFieldValue(or_token=or_token) + + def __create_mitre_threat(self, meta_info: MetaInfoContainer) -> tuple[list, list]: + tactics = set() + techniques = [] + + for tactic in meta_info.mitre_attack.get("tactics"): + tactics.add(tactic["tactic"]) + + for technique in meta_info.mitre_attack.get("techniques"): + if technique.get("tactic"): + for tactic in technique["tactic"]: + tactics.add(tactic) + techniques.append(technique["technique_id"]) + + return sorted(tactics), sorted(techniques) + + def finalize_query( + self, + prefix: str, + query: str, + functions: str, + meta_info: Optional[MetaInfoContainer] = None, + source_mapping: Optional[SourceMapping] = None, + not_supported_functions: Optional[list] = None, + *args, # noqa: 
ARG002 + **kwargs, # noqa: ARG002 + ) -> str: + query = super().finalize_query(prefix=prefix, query=query, functions=functions) + rule = copy.deepcopy(DEFAULT_LOGRHYTHM_AXON_RULE) + rule["observationPipeline"]["pattern"]["operations"][0]["logObserved"]["filter"] = query + rule["title"] = meta_info.title + rule["description"] = get_rule_description_str( + description=meta_info.description or rule["description"], + author=meta_info.author, + license_=meta_info.license, + ) + rule["observationPipeline"]["pattern"]["operations"][0]["ruleElementKey"] = meta_info.id + rule["observationPipeline"]["metadataFields"]["threat.severity"] = _SEVERITIES_MAP.get( + meta_info.severity, SeverityType.medium + ) + if tactics := meta_info.mitre_attack.get("tactics"): + rule["observationPipeline"]["metadataFields"]["threat.mitre_tactic"] = ", ".join( + f"{i['external_id']}:{i['tactic']}" for i in tactics + ) + if techniques := meta_info.mitre_attack.get("techniques"): + rule["observationPipeline"]["metadataFields"]["threat.mitre_technique"] = ", ".join( + f"{i['technique_id']}:{i['technique']}" for i in techniques + ) + if meta_info.fields: + rule["observationPipeline"]["pattern"]["operations"][0]["logObserved"]["groupByFields"] = [ + self.map_field(field, source_mapping)[0] for field in meta_info.fields + ] + + json_rule = json.dumps(rule, indent=4, sort_keys=False) + if not_supported_functions: + rendered_not_supported = self.render_not_supported_functions(not_supported_functions) + return json_rule + rendered_not_supported + return json_rule diff --git a/translator/app/translator/platforms/sigma/parsers/sigma.py b/translator/app/translator/platforms/sigma/parsers/sigma.py index ca878fac..f2139e7c 100644 --- a/translator/app/translator/platforms/sigma/parsers/sigma.py +++ b/translator/app/translator/platforms/sigma/parsers/sigma.py @@ -21,7 +21,7 @@ from app.translator.core.exceptions.core import SigmaRuleValidationException from app.translator.core.mixins.rule import YamlRuleMixin 
-from app.translator.core.models.field import FieldValue +from app.translator.core.models.field import FieldValue, Field from app.translator.core.models.parser_output import MetaInfoContainer, SiemContainer from app.translator.core.models.platform_details import PlatformDetails from app.translator.core.tokenizer import QueryTokenizer @@ -43,13 +43,20 @@ def __parse_false_positives(false_positives: Union[str, list[str], None]) -> lis return [i.strip() for i in false_positives.split(",")] return false_positives - def _get_meta_info(self, rule: dict, source_mapping_ids: list[str]) -> MetaInfoContainer: + def _get_meta_info( + self, + rule: dict, + source_mapping_ids: list[str], + parsed_logsources: dict, + sigma_fields_tokens: Union[list[Field], None] = None + ) -> MetaInfoContainer: return MetaInfoContainer( title=rule.get("title"), id_=rule.get("id"), description=rule.get("description"), author=rule.get("author"), date=rule.get("date"), + fields=sigma_fields_tokens, references=rule.get("references", []), license_=rule.get("license"), mitre_attack=self.parse_mitre_attack(rule.get("tags", [])), @@ -58,6 +65,7 @@ def _get_meta_info(self, rule: dict, source_mapping_ids: list[str]) -> MetaInfoC tags=sorted(set(rule.get("tags", []))), false_positives=self.__parse_false_positives(rule.get("falsepositives")), source_mapping_ids=source_mapping_ids, + parsed_logsources=parsed_logsources ) def __validate_rule(self, rule: dict): @@ -77,9 +85,17 @@ def parse(self, text: str) -> SiemContainer: field_names = [field.source_name for field in field_tokens] source_mappings = self.mappings.get_suitable_source_mappings(field_names=field_names, **log_sources) QueryTokenizer.set_field_tokens_generic_names_map(field_tokens, source_mappings, self.mappings.default_mapping) + sigma_fields_tokens = None + if sigma_fields := sigma_rule.get('fields'): + sigma_fields_tokens = [Field(source_name=field) for field in sigma_fields] + 
QueryTokenizer.set_field_tokens_generic_names_map(sigma_fields_tokens, source_mappings, + self.mappings.default_mapping) return SiemContainer( query=tokens, meta_info=self._get_meta_info( - rule=sigma_rule, source_mapping_ids=[source_mapping.source_id for source_mapping in source_mappings] - ), + rule=sigma_rule, + source_mapping_ids=[source_mapping.source_id for source_mapping in source_mappings], + sigma_fields_tokens=sigma_fields_tokens, + parsed_logsources=log_sources + ) )