From ae7b8bb34347df30a7b959ad950a3ae6bf94db87 Mon Sep 17 00:00:00 2001
From: Oleksandr Volha
Date: Mon, 1 Jul 2024 18:05:35 +0300
Subject: [PATCH 1/2] palo alto datamodel mapping usage

---
 uncoder-core/app/translator/core/render.py | 50 ++++++++-------
 .../forti_siem/renders/forti_siem_rule.py  | 62 ++++++++-----------
 .../renders/logrhythm_axon_query.py        | 48 ++++++--------
 .../palo_alto/renders/cortex_xsiam.py      | 35 +++++++++--
 4 files changed, 107 insertions(+), 88 deletions(-)

diff --git a/uncoder-core/app/translator/core/render.py b/uncoder-core/app/translator/core/render.py
index b66f4430..991d2f8c 100644
--- a/uncoder-core/app/translator/core/render.py
+++ b/uncoder-core/app/translator/core/render.py
@@ -397,37 +397,45 @@ def generate_raw_log_fields(self, fields: list[Field], source_mapping: SourceMap
         defined_raw_log_fields.append(prefix)
         return "\n".join(defined_raw_log_fields)
 
+    def _generate_from_tokenized_query_container_by_source_mapping(
+        self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
+    ) -> str:
+        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
+        prefix = self.generate_prefix(source_mapping.log_source_signature, rendered_functions.rendered_prefix)
+
+        if source_mapping.raw_log_fields:
+            defined_raw_log_fields = self.generate_raw_log_fields(
+                fields=query_container.meta_info.query_fields, source_mapping=source_mapping
+            )
+            prefix += f"\n{defined_raw_log_fields}"
+        query = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
+        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
+        return self.finalize_query(
+            prefix=prefix,
+            query=query,
+            functions=rendered_functions.rendered,
+            not_supported_functions=not_supported_functions,
+            meta_info=query_container.meta_info,
+            source_mapping=source_mapping,
+        )
+
     def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
         queries_map = {}
         errors = []
         source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
 
         for source_mapping in source_mappings:
-            rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
-            prefix = self.generate_prefix(source_mapping.log_source_signature, rendered_functions.rendered_prefix)
             try:
-                if source_mapping.raw_log_fields:
-                    defined_raw_log_fields = self.generate_raw_log_fields(
-                        fields=query_container.meta_info.query_fields, source_mapping=source_mapping
-                    )
-                    prefix += f"\n{defined_raw_log_fields}"
-                result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
+                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                    query_container, source_mapping
+                )
             except StrictPlatformException as err:
                 errors.append(err)
                 continue
-            else:
-                not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
-                finalized_query = self.finalize_query(
-                    prefix=prefix,
-                    query=result,
-                    functions=rendered_functions.rendered,
-                    not_supported_functions=not_supported_functions,
-                    meta_info=query_container.meta_info,
-                    source_mapping=source_mapping,
-                )
-                if return_only_first_query_ctx_var.get() is True:
-                    return finalized_query
-                queries_map[source_mapping.source_id] = finalized_query
+
+            if return_only_first_query_ctx_var.get() is True:
+                return finalized_query
+            queries_map[source_mapping.source_id] = finalized_query
         if not queries_map and errors:
             raise errors[0]
         return self.finalize(queries_map)
diff --git a/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py b/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
index dfbc2ee6..272bdfdc 100644
--- a/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
+++ b/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
@@ -19,7 +19,6 @@
 
 from app.translator.const import DEFAULT_VALUE_TYPE
 from app.translator.core.const import TOKEN_TYPE
-from app.translator.core.context_vars import return_only_first_query_ctx_var
 from app.translator.core.custom_types.meta_info import SeverityType
 from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType
 from app.translator.core.custom_types.values import ValueType
@@ -244,40 +243,33 @@ def __replace_not_tokens(self, tokens: list[TOKEN_TYPE]) -> list[TOKEN_TYPE]:
 
         return tokens
 
-    def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
-        queries_map = {}
-        source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
-
-        for source_mapping in source_mappings:
-            is_event_type_set = False
-            field_values = [token for token in query_container.tokens if isinstance(token, FieldValue)]
-            mapped_fields_set = set()
-            for field_value in field_values:
-                mapped_fields = self.map_field(field_value.field, source_mapping)
-                mapped_fields_set = mapped_fields_set.union(set(mapped_fields))
-                if _EVENT_TYPE_FIELD in mapped_fields:
-                    is_event_type_set = True
-                    self.__update_event_type_values(field_value, source_mapping.source_id)
-
-            tokens = self.__replace_not_tokens(query_container.tokens)
-            result = self.generate_query(tokens=tokens, source_mapping=source_mapping)
-            prefix = "" if is_event_type_set else self.generate_prefix(source_mapping.log_source_signature)
-            rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
-            not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
-            finalized_query = self.finalize_query(
-                prefix=prefix,
-                query=result,
-                functions=rendered_functions.rendered,
-                not_supported_functions=not_supported_functions,
-                meta_info=query_container.meta_info,
-                source_mapping=source_mapping,
-                fields=mapped_fields_set,
-            )
-            if return_only_first_query_ctx_var.get() is True:
-                return finalized_query
-            queries_map[source_mapping.source_id] = finalized_query
-
-        return self.finalize(queries_map)
+    def _generate_from_tokenized_query_container_by_source_mapping(
+        self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
+    ) -> str:
+        is_event_type_set = False
+        field_values = [token for token in query_container.tokens if isinstance(token, FieldValue)]
+        mapped_fields_set = set()
+        for field_value in field_values:
+            mapped_fields = self.map_field(field_value.field, source_mapping)
+            mapped_fields_set = mapped_fields_set.union(set(mapped_fields))
+            if _EVENT_TYPE_FIELD in mapped_fields:
+                is_event_type_set = True
+                self.__update_event_type_values(field_value, source_mapping.source_id)
+
+        tokens = self.__replace_not_tokens(query_container.tokens)
+        result = self.generate_query(tokens=tokens, source_mapping=source_mapping)
+        prefix = "" if is_event_type_set else self.generate_prefix(source_mapping.log_source_signature)
+        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
+        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
+        return self.finalize_query(
+            prefix=prefix,
+            query=result,
+            functions=rendered_functions.rendered,
+            not_supported_functions=not_supported_functions,
+            meta_info=query_container.meta_info,
+            source_mapping=source_mapping,
+            fields=mapped_fields_set,
+        )
 
     @staticmethod
     def __update_event_type_values(field_value: FieldValue, source_id: str) -> None:
diff --git a/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py b/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
index 4a288491..002704d7 100644
--- a/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
+++ b/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
@@ -20,7 +20,6 @@
 from typing import Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
-from app.translator.core.context_vars import return_only_first_query_ctx_var
 from app.translator.core.custom_types.tokens import LogicalOperatorType
 from app.translator.core.custom_types.values import ValueType
 from app.translator.core.exceptions.core import StrictPlatformException
@@ -242,30 +241,23 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp
 
         return super().apply_token(token, source_mapping)
 
-    def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
-        queries_map = {}
-        source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
-
-        for source_mapping in source_mappings:
-            prefix = self.generate_prefix(source_mapping.log_source_signature)
-            if "product" in query_container.meta_info.parsed_logsources:
-                prefix = f"{prefix} CONTAINS {query_container.meta_info.parsed_logsources['product'][0]}"
-            else:
-                prefix = f"{prefix} CONTAINS anything"
-
-            result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
-            rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
-            not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
-            finalized_query = self.finalize_query(
-                prefix=prefix,
-                query=result,
-                functions=rendered_functions.rendered,
-                not_supported_functions=not_supported_functions,
-                meta_info=query_container.meta_info,
-                source_mapping=source_mapping,
-            )
-            if return_only_first_query_ctx_var.get() is True:
-                return finalized_query
-            queries_map[source_mapping.source_id] = finalized_query
-
-        return self.finalize(queries_map)
+    def _generate_from_tokenized_query_container_by_source_mapping(
+        self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
+    ) -> str:
+        prefix = self.generate_prefix(source_mapping.log_source_signature)
+        if "product" in query_container.meta_info.parsed_logsources:
+            prefix = f"{prefix} CONTAINS {query_container.meta_info.parsed_logsources['product'][0]}"
+        else:
+            prefix = f"{prefix} CONTAINS anything"
+
+        result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
+        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
+        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
+        return self.finalize_query(
+            prefix=prefix,
+            query=result,
+            functions=rendered_functions.rendered,
+            not_supported_functions=not_supported_functions,
+            meta_info=query_container.meta_info,
+            source_mapping=source_mapping,
+        )
diff --git a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
index d8d76a04..619e30ff 100644
--- a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
+++ b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
@@ -20,13 +20,15 @@
 from typing import ClassVar, Optional, Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
-from app.translator.core.context_vars import preset_log_source_str_ctx_var
+from app.translator.core.context_vars import preset_log_source_str_ctx_var, return_only_first_query_ctx_var
 from app.translator.core.custom_types.tokens import OperatorType
 from app.translator.core.custom_types.values import ValueType
-from app.translator.core.mapping import SourceMapping
+from app.translator.core.exceptions.core import StrictPlatformException
+from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping
 from app.translator.core.models.field import FieldValue, Keyword
 from app.translator.core.models.identifier import Identifier
 from app.translator.core.models.platform_details import PlatformDetails
+from app.translator.core.models.query_container import TokenizedQueryContainer
 from app.translator.core.render import BaseFieldFieldRender, BaseFieldValueRender, PlatformQueryRender
 from app.translator.core.str_value_manager import StrValue
 from app.translator.managers import render_manager
@@ -71,8 +73,7 @@ def _wrap_str_value(value: str) -> str:
     def equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str:
         if isinstance(value, list):
             values = ", ".join(
-                f"{self._pre_process_value(field, str(v) if isinstance(v, int) else v, ValueType.value, True)}"
-                for v in value
+                f"{self._pre_process_value(field, str(v), value_type=ValueType.value, wrap_str=True)}" for v in value
             )
             return f"{field} in ({values})"
 
@@ -222,3 +223,29 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp
     @staticmethod
     def _finalize_search_query(query: str) -> str:
         return f"| filter {query}" if query else ""
+
+    def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
+        queries_map = {}
+        errors = []
+        source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
+
+        for source_mapping in source_mappings:
+            try:
+                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                    query_container, source_mapping
+                )
+            except StrictPlatformException as err:
+                if source_mapping.source_id != DEFAULT_MAPPING_NAME:
+                    errors.append(err)
+                    continue
+
+                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                    query_container, self.mappings.get_source_mapping(DEFAULT_MAPPING_NAME)
+                )
+
+            if return_only_first_query_ctx_var.get() is True:
+                return finalized_query
+            queries_map[source_mapping.source_id] = finalized_query
+        if not queries_map and errors:
+            raise errors[0]
+        return self.finalize(queries_map)

From 59cf1e7a62536a20c9e65a899959daf2db24e127 Mon Sep 17 00:00:00 2001
From: Oleksandr Volha
Date: Tue, 2 Jul 2024 13:03:10 +0300
Subject: [PATCH 2/2] fixes

---
 uncoder-core/app/translator/core/render.py |  6 ++---
 .../palo_alto/renders/cortex_xsiam.py      | 23 +++++++++++--------
 2 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/uncoder-core/app/translator/core/render.py b/uncoder-core/app/translator/core/render.py
index 991d2f8c..e2f8d9fb 100644
--- a/uncoder-core/app/translator/core/render.py
+++ b/uncoder-core/app/translator/core/render.py
@@ -429,13 +429,13 @@ def generate_from_tokenized_query_container(self, query_container: TokenizedQuer
                 finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
                     query_container, source_mapping
                 )
+                if return_only_first_query_ctx_var.get() is True:
+                    return finalized_query
+                queries_map[source_mapping.source_id] = finalized_query
             except StrictPlatformException as err:
                 errors.append(err)
                 continue
 
-            if return_only_first_query_ctx_var.get() is True:
-                return finalized_query
-            queries_map[source_mapping.source_id] = finalized_query
         if not queries_map and errors:
             raise errors[0]
         return self.finalize(queries_map)
diff --git a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
index 619e30ff..dc25affa 100644
--- a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
+++ b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
@@ -16,7 +16,7 @@
    limitations under the License.
 -----------------------------------------------------------------
 """
-
+from contextlib import suppress
 from typing import ClassVar, Optional, Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
@@ -229,23 +229,26 @@ def generate_from_tokenized_query_container(self, query_container: TokenizedQuer
         errors = []
         source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
 
-        for source_mapping in source_mappings:
+        last_mapping_index = len(source_mappings) - 1
+        for index, source_mapping in enumerate(source_mappings):
             try:
                 finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
                     query_container, source_mapping
                 )
+                if return_only_first_query_ctx_var.get() is True:
+                    return finalized_query
+                queries_map[source_mapping.source_id] = finalized_query
             except StrictPlatformException as err:
-                if source_mapping.source_id != DEFAULT_MAPPING_NAME:
-                    errors.append(err)
+                errors.append(err)
+                if index != last_mapping_index or source_mapping.source_id == DEFAULT_MAPPING_NAME or queries_map:
                     continue
 
-                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
-                    query_container, self.mappings.get_source_mapping(DEFAULT_MAPPING_NAME)
-                )
+                with suppress(StrictPlatformException):
+                    finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                        query_container, self.mappings.get_source_mapping(DEFAULT_MAPPING_NAME)
+                    )
+                    queries_map[source_mapping.source_id] = finalized_query
 
-            if return_only_first_query_ctx_var.get() is True:
-                return finalized_query
-            queries_map[source_mapping.source_id] = finalized_query
         if not queries_map and errors:
             raise errors[0]
         return self.finalize(queries_map)