diff --git a/uncoder-core/app/translator/core/const.py b/uncoder-core/app/translator/core/const.py index a8788ada..c8fd16ce 100644 --- a/uncoder-core/app/translator/core/const.py +++ b/uncoder-core/app/translator/core/const.py @@ -1,6 +1,10 @@ from typing import Union -from app.translator.core.models.field import Alias, Field, FieldValue, Keyword -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field import Alias, Field +from app.translator.core.models.query_tokens.field_field import FieldField +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.function_value import FunctionValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword -TOKEN_TYPE = Union[FieldValue, Keyword, Identifier, Field, Alias] +QUERY_TOKEN_TYPE = Union[FieldField, FieldValue, FunctionValue, Keyword, Identifier, Field, Alias] diff --git a/uncoder-core/app/translator/core/custom_types/functions.py b/uncoder-core/app/translator/core/custom_types/functions.py index 17452c5b..8cecc010 100644 --- a/uncoder-core/app/translator/core/custom_types/functions.py +++ b/uncoder-core/app/translator/core/custom_types/functions.py @@ -22,7 +22,6 @@ class FunctionType(CustomEnum): upper = "upper" array_length = "array_length" - compare = "compare" extract_time = "extract_time" ipv4_is_in_range = "ipv4_is_in_range" diff --git a/uncoder-core/app/translator/core/functions.py b/uncoder-core/app/translator/core/functions.py index 924b8d53..2517129b 100644 --- a/uncoder-core/app/translator/core/functions.py +++ b/uncoder-core/app/translator/core/functions.py @@ -25,8 +25,8 @@ from app.translator.core.exceptions.functions import NotSupportedFunctionException from app.translator.core.mapping import SourceMapping -from app.translator.core.models.field import Alias, Field from app.translator.core.models.functions.base import Function, ParsedFunctions, RenderedFunctions +from app.translator.core.models.query_tokens.field import Alias, Field from app.translator.tools.utils import execute_module from settings import INIT_FUNCTIONS @@ -83,7 +83,6 @@ def parse(self, func_body: str, raw: str) -> Function: class FunctionRender(ABC): function_names_map: ClassVar[dict[str, str]] = {} order_to_render: int = 0 - in_query_render: bool = False render_to_prefix: bool = False manager: PlatformFunctionsManager = None @@ -117,7 +116,6 @@ def __init__(self): self._parsers_map: dict[str, FunctionParser] = {} # {platform_func_name: FunctionParser} self._renders_map: dict[str, FunctionRender] = {} # {generic_func_name: FunctionRender} - self._in_query_renders_map: dict[str, FunctionRender] = {} # {generic_func_name: FunctionRender} self._order_to_render: dict[str, int] = {} # {generic_func_name: int} def register_render(self, render_class: type[FunctionRender]) -> type[FunctionRender]: @@ -126,8 +124,6 @@ def register_render(self, render_class: type[FunctionRender]) -> type[FunctionRe for generic_function_name in render.function_names_map: self._renders_map[generic_function_name] = render self._order_to_render[generic_function_name] = render.order_to_render - if render.in_query_render: - self._in_query_renders_map[generic_function_name] = render return render_class @@ -149,24 +145,16 @@ def get_hof_parser(self, platform_func_name: str) -> HigherOrderFunctionParser: raise NotSupportedFunctionException - def get_parser(self, platform_func_name: str) -> 
FunctionParser: + def get_parser(self, platform_func_name: str) -> Optional[FunctionParser]: if INIT_FUNCTIONS and (parser := self._parsers_map.get(platform_func_name)): return parser - raise NotSupportedFunctionException - def get_render(self, generic_func_name: str) -> FunctionRender: if INIT_FUNCTIONS and (render := self._renders_map.get(generic_func_name)): return render raise NotSupportedFunctionException - def get_in_query_render(self, generic_func_name: str) -> FunctionRender: - if INIT_FUNCTIONS and (render := self._in_query_renders_map.get(generic_func_name)): - return render - - raise NotSupportedFunctionException - @property def order_to_render(self) -> dict[str, int]: if INIT_FUNCTIONS: diff --git a/uncoder-core/app/translator/core/mixins/logic.py b/uncoder-core/app/translator/core/mixins/logic.py index b24a1c99..7002e847 100644 --- a/uncoder-core/app/translator/core/mixins/logic.py +++ b/uncoder-core/app/translator/core/mixins/logic.py @@ -1,19 +1,21 @@ -from typing import Union - +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType -from app.translator.core.models.field import FieldValue, Keyword -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field_field import FieldField +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.function_value import FunctionValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword class ANDLogicOperatorMixin: @staticmethod - def get_missed_and_token_indices(tokens: list[Union[FieldValue, Keyword, Identifier]]) -> list[int]: + def get_missed_and_token_indices(tokens: list[QUERY_TOKEN_TYPE]) -> list[int]: missed_and_indices = [] for index in range(len(tokens) - 1): token = tokens[index] next_token = tokens[index + 1] if ( - isinstance(token, (FieldValue, Keyword)) + isinstance(token, (FieldField, FieldValue, FunctionValue, Keyword)) or isinstance(token, Identifier) and token.token_type == GroupType.R_PAREN ) and not ( @@ -23,9 +25,7 @@ def get_missed_and_token_indices(tokens: list[Union[FieldValue, Keyword, Identif missed_and_indices.append(index + 1) return list(reversed(missed_and_indices)) - def add_and_token_if_missed( - self, tokens: list[Union[FieldValue, Keyword, Identifier]] - ) -> list[Union[FieldValue, Keyword, Identifier]]: + def add_and_token_if_missed(self, tokens: list[QUERY_TOKEN_TYPE]) -> list[QUERY_TOKEN_TYPE]: indices = self.get_missed_and_token_indices(tokens=tokens) for index in indices: tokens.insert(index, Identifier(token_type=LogicalOperatorType.AND)) diff --git a/uncoder-core/app/translator/core/mixins/operator.py b/uncoder-core/app/translator/core/mixins/operator.py index dee82395..dec9e3f4 100644 --- a/uncoder-core/app/translator/core/mixins/operator.py +++ b/uncoder-core/app/translator/core/mixins/operator.py @@ -19,7 +19,7 @@ from typing import Optional, Union from app.translator.core.custom_types.tokens import OperatorType -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.identifier import Identifier class WildCardMixin: diff --git a/uncoder-core/app/translator/core/models/field.py b/uncoder-core/app/translator/core/models/field.py deleted file mode 100644 index d9facb77..00000000 --- a/uncoder-core/app/translator/core/models/field.py +++ /dev/null @@ -1,136 
+0,0 @@ -from typing import Optional, Union - -from app.translator.core.custom_types.tokens import STR_SEARCH_OPERATORS, OperatorType -from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping -from app.translator.core.models.identifier import Identifier -from app.translator.core.str_value_manager import StrValue - - -class Alias: - def __init__(self, name: str): - self.name = name - - -class Field: - def __init__(self, source_name: str): - self.source_name = source_name - self.__generic_names_map = {} - - def get_generic_field_name(self, source_id: str) -> Optional[str]: - return self.__generic_names_map.get(source_id) - - def add_generic_names_map(self, generic_names_map: dict) -> None: - self.__generic_names_map = generic_names_map - - def set_generic_names_map(self, source_mappings: list[SourceMapping], default_mapping: SourceMapping) -> None: - generic_names_map = { - source_mapping.source_id: source_mapping.fields_mapping.get_generic_field_name(self.source_name) - or self.source_name - for source_mapping in source_mappings - } - if DEFAULT_MAPPING_NAME not in generic_names_map: - fields_mapping = default_mapping.fields_mapping - generic_names_map[DEFAULT_MAPPING_NAME] = ( - fields_mapping.get_generic_field_name(self.source_name) or self.source_name - ) - - self.__generic_names_map = generic_names_map - - -class PredefinedField: - def __init__(self, name: str): - self.name = name - - -class FieldField: - def __init__( - self, - source_name_left: str, - operator: Identifier, - source_name_right: str, - is_alias_left: bool = False, - is_alias_right: bool = False, - ): - self.field_left = Field(source_name=source_name_left) if not is_alias_left else None - self.alias_left = Alias(name=source_name_left) if is_alias_left else None - self.operator = operator - self.field_right = Field(source_name=source_name_right) if not is_alias_right else None - self.alias_right = Alias(name=source_name_right) if is_alias_right else None - - -class FieldValue: - def __init__( - self, - source_name: str, - operator: Identifier, - value: Union[int, str, StrValue, list, tuple], - is_alias: bool = False, - is_predefined_field: bool = False, - ): - # mapped by platform fields mapping - self.field = Field(source_name=source_name) if not (is_alias or is_predefined_field) else None - # not mapped - self.alias = Alias(name=source_name) if is_alias else None - # mapped by platform predefined fields mapping - self.predefined_field = PredefinedField(name=source_name) if is_predefined_field else None - - self.operator = operator - self.values = [] - self.__add_value(value) - - @property - def value(self) -> Union[int, str, StrValue, list[Union[int, str, StrValue]]]: - if isinstance(self.values, list) and len(self.values) == 1: - return self.values[0] - return self.values - - @value.setter - def value(self, new_value: Union[int, str, StrValue, list[Union[int, str, StrValue]]]) -> None: - self.values = [] - self.__add_value(new_value) - - def __add_value(self, value: Optional[Union[int, str, StrValue, list, tuple]]) -> None: - if value and isinstance(value, (list, tuple)): - for v in value: - self.__add_value(v) - elif ( - value - and isinstance(value, str) - and value.isnumeric() - and self.operator.token_type not in STR_SEARCH_OPERATORS - ): - self.values.append(int(value)) - elif value is not None and isinstance(value, (int, str)): - self.values.append(value) - - def __repr__(self): - if self.alias: - return f"{self.alias.name} {self.operator.token_type} {self.values}" - - if self.predefined_field: 
- return f"{self.predefined_field.name} {self.operator.token_type} {self.values}" - - return f"{self.field.source_name} {self.operator.token_type} {self.values}" - - -class Keyword: - def __init__(self, value: Union[str, list[str]]): - self.operator: Identifier = Identifier(token_type=OperatorType.KEYWORD) - self.name = "keyword" - self.values = [] - self.__add_value(value=value) - - @property - def value(self) -> Union[str, list[str]]: - if isinstance(self.values, list) and len(self.values) == 1: - return self.values[0] - return self.values - - def __add_value(self, value: Union[str, list[str]]) -> None: - if value and isinstance(value, (list, tuple)): - self.values.extend(value) - elif value and isinstance(value, str): - self.values.append(value) - - def __repr__(self): - return f"{self.name} {self.operator.token_type} {self.values}" diff --git a/uncoder-core/app/translator/core/models/functions/base.py b/uncoder-core/app/translator/core/models/functions/base.py index 187a92c2..05fe7535 100644 --- a/uncoder-core/app/translator/core/models/functions/base.py +++ b/uncoder-core/app/translator/core/models/functions/base.py @@ -1,16 +1,24 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import Optional, Union +from typing import TYPE_CHECKING, Optional, Union -from app.translator.core.models.field import Alias, Field, FieldValue, Keyword -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field import Alias, Field +from app.translator.core.models.query_tokens.field_field import FieldField +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword + +if TYPE_CHECKING: + from app.translator.core.models.query_tokens.function_value import FunctionValue @dataclass class Function: name: str = None - args: list[Union[Alias, Field, FieldValue, Keyword, Function, Identifier, str, bool]] = field(default_factory=list) + args: list[ + Union[Alias, Field, FieldField, FieldValue, FunctionValue, Keyword, Function, Identifier, int, str, bool] + ] = field(default_factory=list) alias: Optional[Alias] = None raw: str = "" diff --git a/uncoder-core/app/translator/core/models/functions/bin.py b/uncoder-core/app/translator/core/models/functions/bin.py index a54884e6..828fb891 100644 --- a/uncoder-core/app/translator/core/models/functions/bin.py +++ b/uncoder-core/app/translator/core/models/functions/bin.py @@ -2,21 +2,15 @@ from typing import Optional from app.translator.core.custom_types.functions import FunctionType -from app.translator.core.models.field import Field +from app.translator.core.custom_types.time import TimeFrameType from app.translator.core.models.functions.base import Function -from app.translator.tools.custom_enum import CustomEnum - - -class SpanType(CustomEnum): - days = "days" - hours = "hours" - minutes = "minutes" +from app.translator.core.models.query_tokens.field import Field @dataclass class Span: value: str = "1" - type_: str = SpanType.days + type_: str = TimeFrameType.days @dataclass diff --git a/uncoder-core/app/translator/core/models/functions/eval.py b/uncoder-core/app/translator/core/models/functions/eval.py index 6e32449f..5632870e 100644 --- a/uncoder-core/app/translator/core/models/functions/eval.py +++ b/uncoder-core/app/translator/core/models/functions/eval.py @@ -2,9 +2,9 @@ from typing import Union from 
app.translator.core.custom_types.functions import FunctionType -from app.translator.core.models.field import Alias, Field from app.translator.core.models.functions.base import Function -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field import Alias, Field +from app.translator.core.models.query_tokens.identifier import Identifier @dataclass diff --git a/uncoder-core/app/translator/core/models/functions/group_by.py b/uncoder-core/app/translator/core/models/functions/group_by.py index 04b3d4e6..ef1fa745 100644 --- a/uncoder-core/app/translator/core/models/functions/group_by.py +++ b/uncoder-core/app/translator/core/models/functions/group_by.py @@ -2,8 +2,8 @@ from typing import Union from app.translator.core.custom_types.functions import FunctionType -from app.translator.core.models.field import Alias from app.translator.core.models.functions.base import Function +from app.translator.core.models.query_tokens.field import Alias @dataclass diff --git a/uncoder-core/app/translator/core/models/functions/join.py b/uncoder-core/app/translator/core/models/functions/join.py index 0f44da68..cd1ed4db 100644 --- a/uncoder-core/app/translator/core/models/functions/join.py +++ b/uncoder-core/app/translator/core/models/functions/join.py @@ -2,10 +2,10 @@ from typing import Union from app.translator.core.custom_types.functions import FunctionType -from app.translator.core.models.field import Alias, Field from app.translator.core.models.functions.base import Function -from app.translator.core.models.identifier import Identifier from app.translator.core.models.query_container import TokenizedQueryContainer +from app.translator.core.models.query_tokens.field import Alias, Field +from app.translator.core.models.query_tokens.identifier import Identifier from app.translator.tools.custom_enum import CustomEnum diff --git a/uncoder-core/app/translator/core/models/functions/rename.py b/uncoder-core/app/translator/core/models/functions/rename.py index 06455e05..5a9dba6a 100644 --- a/uncoder-core/app/translator/core/models/functions/rename.py +++ b/uncoder-core/app/translator/core/models/functions/rename.py @@ -1,8 +1,8 @@ from dataclasses import dataclass from app.translator.core.custom_types.functions import FunctionType -from app.translator.core.models.field import Alias, Field from app.translator.core.models.functions.base import Function +from app.translator.core.models.query_tokens.field import Alias, Field @dataclass diff --git a/uncoder-core/app/translator/core/models/functions/sort.py b/uncoder-core/app/translator/core/models/functions/sort.py index e35646dc..63993401 100644 --- a/uncoder-core/app/translator/core/models/functions/sort.py +++ b/uncoder-core/app/translator/core/models/functions/sort.py @@ -2,8 +2,8 @@ from typing import Union from app.translator.core.custom_types.functions import FunctionType -from app.translator.core.models.field import Alias, Field from app.translator.core.models.functions.base import Function +from app.translator.core.models.query_tokens.field import Alias, Field from app.translator.tools.custom_enum import CustomEnum diff --git a/uncoder-core/app/translator/core/models/functions/timeframe.py b/uncoder-core/app/translator/core/models/functions/time.py similarity index 68% rename from uncoder-core/app/translator/core/models/functions/timeframe.py rename to uncoder-core/app/translator/core/models/functions/time.py index b9fedc82..eb6a1229 100644 --- a/uncoder-core/app/translator/core/models/functions/timeframe.py 
+++ b/uncoder-core/app/translator/core/models/functions/time.py @@ -1,14 +1,8 @@ from dataclasses import dataclass from app.translator.core.custom_types.functions import FunctionType +from app.translator.core.custom_types.time import TimeFrameType from app.translator.core.models.functions.base import Function -from app.translator.tools.custom_enum import CustomEnum - - -class TimeFrameType(CustomEnum): - days = "days" - hours = "hours" - minutes = "minutes" @dataclass diff --git a/uncoder-core/app/translator/core/models/query_container.py b/uncoder-core/app/translator/core/models/query_container.py index 0d90f237..7c56c71a 100644 --- a/uncoder-core/app/translator/core/models/query_container.py +++ b/uncoder-core/app/translator/core/models/query_container.py @@ -3,11 +3,11 @@ from datetime import datetime from typing import Optional -from app.translator.core.const import TOKEN_TYPE +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.custom_types.meta_info import SeverityType from app.translator.core.mapping import DEFAULT_MAPPING_NAME -from app.translator.core.models.field import Field from app.translator.core.models.functions.base import ParsedFunctions +from app.translator.core.models.query_tokens.field import Field class MetaInfoContainer: @@ -65,6 +65,6 @@ class RawQueryDictContainer: @dataclass class TokenizedQueryContainer: - tokens: list[TOKEN_TYPE] + tokens: list[QUERY_TOKEN_TYPE] meta_info: MetaInfoContainer functions: ParsedFunctions = field(default_factory=ParsedFunctions) diff --git a/uncoder-core/app/translator/core/models/query_tokens/__init__.py b/uncoder-core/app/translator/core/models/query_tokens/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/uncoder-core/app/translator/core/models/query_tokens/field.py b/uncoder-core/app/translator/core/models/query_tokens/field.py new file mode 100644 index 00000000..84d07e4e --- /dev/null +++ b/uncoder-core/app/translator/core/models/query_tokens/field.py @@ -0,0 +1,39 @@ +from typing import Optional + +from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping + + +class Alias: + def __init__(self, name: str): + self.name = name + + +class Field: + def __init__(self, source_name: str): + self.source_name = source_name + self.__generic_names_map = {} + + def get_generic_field_name(self, source_id: str) -> Optional[str]: + return self.__generic_names_map.get(source_id) + + def add_generic_names_map(self, generic_names_map: dict) -> None: + self.__generic_names_map = generic_names_map + + def set_generic_names_map(self, source_mappings: list[SourceMapping], default_mapping: SourceMapping) -> None: + generic_names_map = { + source_mapping.source_id: source_mapping.fields_mapping.get_generic_field_name(self.source_name) + or self.source_name + for source_mapping in source_mappings + } + if DEFAULT_MAPPING_NAME not in generic_names_map: + fields_mapping = default_mapping.fields_mapping + generic_names_map[DEFAULT_MAPPING_NAME] = ( + fields_mapping.get_generic_field_name(self.source_name) or self.source_name + ) + + self.__generic_names_map = generic_names_map + + +class PredefinedField: + def __init__(self, name: str): + self.name = name diff --git a/uncoder-core/app/translator/core/models/query_tokens/field_field.py b/uncoder-core/app/translator/core/models/query_tokens/field_field.py new file mode 100644 index 00000000..86099f08 --- /dev/null +++ b/uncoder-core/app/translator/core/models/query_tokens/field_field.py @@ -0,0 +1,18 @@ +from 
app.translator.core.models.query_tokens.field import Alias, Field +from app.translator.core.models.query_tokens.identifier import Identifier + + +class FieldField: + def __init__( + self, + source_name_left: str, + operator: Identifier, + source_name_right: str, + is_alias_left: bool = False, + is_alias_right: bool = False, + ): + self.field_left = Field(source_name=source_name_left) if not is_alias_left else None + self.alias_left = Alias(name=source_name_left) if is_alias_left else None + self.operator = operator + self.field_right = Field(source_name=source_name_right) if not is_alias_right else None + self.alias_right = Alias(name=source_name_right) if is_alias_right else None diff --git a/uncoder-core/app/translator/core/models/query_tokens/field_value.py b/uncoder-core/app/translator/core/models/query_tokens/field_value.py new file mode 100644 index 00000000..cf491da4 --- /dev/null +++ b/uncoder-core/app/translator/core/models/query_tokens/field_value.py @@ -0,0 +1,35 @@ +from typing import Union + +from app.translator.core.custom_types.tokens import STR_SEARCH_OPERATORS +from app.translator.core.models.query_tokens.field import Alias, Field, PredefinedField +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.value import Value +from app.translator.core.str_value_manager import StrValue + + +class FieldValue(Value): + def __init__( + self, + source_name: str, + operator: Identifier, + value: Union[bool, int, str, StrValue, list, tuple], + is_alias: bool = False, + is_predefined_field: bool = False, + ): + super().__init__(value, cast_to_int=operator.token_type not in STR_SEARCH_OPERATORS) + # mapped by platform fields mapping + self.field = Field(source_name=source_name) if not (is_alias or is_predefined_field) else None + # not mapped + self.alias = Alias(name=source_name) if is_alias else None + # mapped by platform predefined fields mapping + self.predefined_field = PredefinedField(name=source_name) if is_predefined_field else None + self.operator = operator + + def __repr__(self): + if self.alias: + return f"{self.alias.name} {self.operator.token_type} {self.values}" + + if self.predefined_field: + return f"{self.predefined_field.name} {self.operator.token_type} {self.values}" + + return f"{self.field.source_name} {self.operator.token_type} {self.values}" diff --git a/uncoder-core/app/translator/core/models/query_tokens/function_value.py b/uncoder-core/app/translator/core/models/query_tokens/function_value.py new file mode 100644 index 00000000..6ffd49bc --- /dev/null +++ b/uncoder-core/app/translator/core/models/query_tokens/function_value.py @@ -0,0 +1,14 @@ +from typing import Union + +from app.translator.core.custom_types.tokens import STR_SEARCH_OPERATORS +from app.translator.core.models.functions.base import Function +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.value import Value +from app.translator.core.str_value_manager import StrValue + + +class FunctionValue(Value): + def __init__(self, function: Function, operator: Identifier, value: Union[int, str, StrValue, list, tuple]): + super().__init__(value, cast_to_int=operator.token_type not in STR_SEARCH_OPERATORS) + self.function = function + self.operator = operator diff --git a/uncoder-core/app/translator/core/models/identifier.py b/uncoder-core/app/translator/core/models/query_tokens/identifier.py similarity index 100% rename from 
uncoder-core/app/translator/core/models/identifier.py rename to uncoder-core/app/translator/core/models/query_tokens/identifier.py diff --git a/uncoder-core/app/translator/core/models/query_tokens/keyword.py b/uncoder-core/app/translator/core/models/query_tokens/keyword.py new file mode 100644 index 00000000..09382791 --- /dev/null +++ b/uncoder-core/app/translator/core/models/query_tokens/keyword.py @@ -0,0 +1,21 @@ +from typing import Union + +from app.translator.core.custom_types.tokens import OperatorType +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.value import Value + + +class Keyword(Value): + def __init__(self, value: Union[str, list[str]]): + super().__init__(value) + self.operator: Identifier = Identifier(token_type=OperatorType.KEYWORD) + self.name = "keyword" + + def _add_value(self, value: Union[str, list[str]]) -> None: + if value and isinstance(value, (list, tuple)): + self.values.extend(value) + elif value and isinstance(value, str): + self.values.append(value) + + def __repr__(self): + return f"{self.name} {self.operator.token_type} {self.values}" diff --git a/uncoder-core/app/translator/core/models/query_tokens/value.py b/uncoder-core/app/translator/core/models/query_tokens/value.py new file mode 100644 index 00000000..d3d77eb0 --- /dev/null +++ b/uncoder-core/app/translator/core/models/query_tokens/value.py @@ -0,0 +1,30 @@ +from typing import Optional, Union + +from app.translator.core.str_value_manager import StrValue + + +class Value: + def __init__(self, value: Union[bool, int, str, StrValue, list, tuple], cast_to_int: bool = False): + self.values = [] + self.__cast_to_int = cast_to_int + self._add_value(value) + + @property + def value(self) -> Union[bool, int, str, StrValue, list[Union[int, str, StrValue]]]: + if isinstance(self.values, list) and len(self.values) == 1: + return self.values[0] + return self.values + + @value.setter + def value(self, new_value: Union[bool, int, str, StrValue, list[Union[int, str, StrValue]]]) -> None: + self.values = [] + self._add_value(new_value) + + def _add_value(self, value: Optional[Union[bool, int, str, StrValue, list, tuple]]) -> None: + if value and isinstance(value, (list, tuple)): + for v in value: + self._add_value(v) + elif value and isinstance(value, str) and value.isnumeric() and self.__cast_to_int: + self.values.append(int(value)) + elif value is not None and isinstance(value, (bool, int, str)): + self.values.append(value) diff --git a/uncoder-core/app/translator/core/parser.py b/uncoder-core/app/translator/core/parser.py index 18b50739..c51ada8c 100644 --- a/uncoder-core/app/translator/core/parser.py +++ b/uncoder-core/app/translator/core/parser.py @@ -18,17 +18,18 @@ import re from abc import ABC, abstractmethod -from typing import Union +from typing import Optional, Union -from app.translator.core.const import TOKEN_TYPE +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.exceptions.parser import TokenizerGeneralException from app.translator.core.functions import PlatformFunctions from app.translator.core.mapping import BasePlatformMappings, SourceMapping -from app.translator.core.models.field import Field, FieldValue, Keyword -from app.translator.core.models.functions.base import ParsedFunctions -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.functions.base import Function from app.translator.core.models.platform_details import PlatformDetails from 
app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer +from app.translator.core.models.query_tokens.field import Field +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.function_value import FunctionValue from app.translator.core.tokenizer import QueryTokenizer @@ -55,24 +56,30 @@ class PlatformQueryParser(QueryParser, ABC): tokenizer: QueryTokenizer = None platform_functions: PlatformFunctions = None - def get_fields_tokens(self, tokens: list[Union[FieldValue, Keyword, Identifier]]) -> list[Field]: - return [token.field for token in self.tokenizer.filter_tokens(tokens, FieldValue)] - - def get_tokens_and_source_mappings( - self, query: str, log_sources: dict[str, Union[str, list[str]]] - ) -> tuple[list[TOKEN_TYPE], list[SourceMapping]]: + def get_query_tokens(self, query: str) -> list[QUERY_TOKEN_TYPE]: if not query: raise TokenizerGeneralException("Can't translate empty query. Please provide more details") - tokens = self.tokenizer.tokenize(query=query) - field_tokens = self.get_fields_tokens(tokens=tokens) + return self.tokenizer.tokenize(query=query) + + def get_field_tokens( + self, query_tokens: list[QUERY_TOKEN_TYPE], functions: Optional[list[Function]] = None + ) -> list[Field]: + field_tokens = [] + for token in query_tokens: + if isinstance(token, FieldValue): + field_tokens.append(token.field) + elif isinstance(token, FunctionValue): + field_tokens.extend(self.tokenizer.get_field_tokens_from_func_args([token.function])) + + if functions: + field_tokens.extend(self.tokenizer.get_field_tokens_from_func_args(functions)) + + return field_tokens + + def get_source_mappings( + self, field_tokens: list[Field], log_sources: dict[str, Union[str, list[str]]] + ) -> list[SourceMapping]: field_names = [field.source_name for field in field_tokens] source_mappings = self.mappings.get_suitable_source_mappings(field_names=field_names, **log_sources) self.tokenizer.set_field_tokens_generic_names_map(field_tokens, source_mappings, self.mappings.default_mapping) - - return tokens, source_mappings - - def set_functions_fields_generic_names( - self, functions: ParsedFunctions, source_mappings: list[SourceMapping] - ) -> None: - field_tokens = self.tokenizer.get_field_tokens_from_func_args(args=functions.functions) - self.tokenizer.set_field_tokens_generic_names_map(field_tokens, source_mappings, self.mappings.default_mapping) + return source_mappings diff --git a/uncoder-core/app/translator/core/render.py b/uncoder-core/app/translator/core/render.py index 345cef0b..618f2d37 100644 --- a/uncoder-core/app/translator/core/render.py +++ b/uncoder-core/app/translator/core/render.py @@ -22,7 +22,7 @@ from typing import ClassVar, Optional, Union from app.translator.const import DEFAULT_VALUE_TYPE -from app.translator.core.const import TOKEN_TYPE +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.context_vars import return_only_first_query_ctx_var, wrap_query_with_meta_info_ctx_var from app.translator.core.custom_types.tokens import LogicalOperatorType, OperatorType from app.translator.core.custom_types.values import ValueType @@ -31,11 +31,15 @@ from app.translator.core.exceptions.parser import UnsupportedOperatorException from app.translator.core.functions import PlatformFunctions from app.translator.core.mapping import DEFAULT_MAPPING_NAME, BasePlatformMappings, LogSourceSignature, SourceMapping -from app.translator.core.models.field import Field, FieldField, 
FieldValue, Keyword, PredefinedField from app.translator.core.models.functions.base import Function, RenderedFunctions -from app.translator.core.models.identifier import Identifier from app.translator.core.models.platform_details import PlatformDetails from app.translator.core.models.query_container import MetaInfoContainer, RawQueryContainer, TokenizedQueryContainer +from app.translator.core.models.query_tokens.field import Field, PredefinedField +from app.translator.core.models.query_tokens.field_field import FieldField +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.function_value import FunctionValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword from app.translator.core.str_value_manager import StrValue, StrValueManager @@ -74,6 +78,10 @@ def _get_value_type(field_name: str, value: Union[int, str, StrValue], value_typ def _wrap_str_value(value: str) -> str: return value + @staticmethod + def _map_bool_value(value: bool) -> str: + return "true" if value else "false" + def _pre_process_value( self, field: str, value: Union[int, str, StrValue], value_type: str = ValueType.value, wrap_str: bool = False ) -> Union[int, str]: @@ -84,6 +92,8 @@ def _pre_process_value( if isinstance(value, str): value = self.str_value_manager.escape_manager.escape(value, value_type) return self._wrap_str_value(value) if wrap_str else value + if isinstance(value, bool): + return self._map_bool_value(value) return value def _pre_process_values_list( @@ -258,7 +268,7 @@ def map_predefined_field(self, predefined_field: PredefinedField) -> str: return mapped_predefined_field_name - def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapping: SourceMapping) -> str: + def apply_token(self, token: QUERY_TOKEN_TYPE, source_mapping: SourceMapping) -> str: if isinstance(token, FieldValue): if token.alias: mapped_fields = [token.alias.name] @@ -286,9 +296,12 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp ] ) return self.group_token % joined if len(cross_paired_fields) > 1 else joined - if isinstance(token, Function): - func_render = self.platform_functions.manager.get_in_query_render(token.name) - return func_render.render(token, source_mapping) + if isinstance(token, FunctionValue): + func_render = self.platform_functions.manager.get_render(token.function.name) + rendered_func = func_render.render(token.function, source_mapping) + return self.field_value_render.apply_field_value( + field=rendered_func, operator=token.operator, value=token.value + ) if isinstance(token, Keyword): return self.field_value_render.apply_field_value(field="", operator=token.operator, value=token.value) if token.token_type in LogicalOperatorType: @@ -296,7 +309,7 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp return token.token_type - def generate_query(self, tokens: list[TOKEN_TYPE], source_mapping: SourceMapping) -> str: + def generate_query(self, tokens: list[QUERY_TOKEN_TYPE], source_mapping: SourceMapping) -> str: result_values = [] unmapped_fields = set() for token in tokens: diff --git a/uncoder-core/app/translator/core/tokenizer.py b/uncoder-core/app/translator/core/tokenizer.py index ff9385ba..08295917 100644 --- a/uncoder-core/app/translator/core/tokenizer.py +++ b/uncoder-core/app/translator/core/tokenizer.py @@ -20,17 +20,18 @@ from abc import ABC, abstractmethod from 
typing import Any, ClassVar, Optional, Union -from app.translator.core.const import TOKEN_TYPE +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType from app.translator.core.custom_types.values import ValueType from app.translator.core.escape_manager import EscapeManager +from app.translator.core.exceptions.functions import NotSupportedFunctionException from app.translator.core.exceptions.parser import ( QueryParenthesesException, TokenizerGeneralException, UnsupportedOperatorException, ) +from app.translator.core.functions import PlatformFunctions from app.translator.core.mapping import SourceMapping -from app.translator.core.models.field import Field, FieldField, FieldValue, Keyword from app.translator.core.models.functions.base import Function from app.translator.core.models.functions.eval import EvalArg from app.translator.core.models.functions.group_by import GroupByFunction @@ -38,14 +39,19 @@ from app.translator.core.models.functions.rename import RenameArg from app.translator.core.models.functions.sort import SortArg from app.translator.core.models.functions.union import UnionFunction -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field import Field +from app.translator.core.models.query_tokens.field_field import FieldField +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.function_value import FunctionValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword from app.translator.core.str_value_manager import StrValue, StrValueManager from app.translator.tools.utils import get_match_group class BaseTokenizer(ABC): @abstractmethod - def tokenize(self, query: str) -> list[Union[FieldValue, Keyword, Identifier]]: + def tokenize(self, query: str) -> list[QUERY_TOKEN_TYPE]: raise NotImplementedError @@ -64,6 +70,7 @@ class QueryTokenizer(BaseTokenizer): # do not modify, use subclasses to define this attribute field_pattern: str = None + function_pattern: str = None _value_pattern: str = None value_pattern: str = None multi_value_pattern: str = None @@ -73,6 +80,7 @@ class QueryTokenizer(BaseTokenizer): wildcard_symbol = None escape_manager: EscapeManager = None str_value_manager: StrValueManager = None + platform_functions: PlatformFunctions = None def __init_subclass__(cls, **kwargs): cls._validate_re_patterns() @@ -268,9 +276,16 @@ def _check_field_value_match(self, query: str, white_space_pattern: str = r"\s+" return False + def search_function_value(self, query: str) -> tuple[FunctionValue, str]: # noqa: ARG002 + raise NotSupportedFunctionException + + @staticmethod + def _check_function_value_match(query: str) -> bool: # noqa: ARG004 + return False + def _get_next_token( self, query: str - ) -> tuple[Union[FieldValue, Keyword, Identifier, list[Union[FieldValue, Identifier]]], str]: + ) -> tuple[Union[FieldValue, FunctionValue, Keyword, Identifier, list[Union[FieldValue, Identifier]]], str]: query = query.strip("\n").strip(" ").strip("\n") if query.startswith(GroupType.L_PAREN): return Identifier(token_type=GroupType.L_PAREN), query[1:] @@ -280,6 +295,8 @@ def _get_next_token( logical_operator = logical_operator_search.group("logical_operator") pos = logical_operator_search.end() return Identifier(token_type=logical_operator.lower()), query[pos:] + if 
self.platform_functions and self._check_function_value_match(query): + return self.search_function_value(query) if self._check_field_value_match(query): return self.search_field_value(query) if self.keyword_pattern and re.match(self.keyword_pattern, query): @@ -288,7 +305,7 @@ def _get_next_token( raise TokenizerGeneralException("Unsupported query entry") @staticmethod - def _validate_parentheses(tokens: list[TOKEN_TYPE]) -> None: + def _validate_parentheses(tokens: list[QUERY_TOKEN_TYPE]) -> None: parentheses = [] for token in tokens: if isinstance(token, Identifier) and token.token_type in (GroupType.L_PAREN, GroupType.R_PAREN): @@ -301,7 +318,7 @@ def _validate_parentheses(tokens: list[TOKEN_TYPE]) -> None: if parentheses: raise QueryParenthesesException - def tokenize(self, query: str) -> list[Union[FieldValue, Keyword, Identifier]]: + def tokenize(self, query: str) -> list[QUERY_TOKEN_TYPE]: tokenized = [] while query: next_token, sliced_query = self._get_next_token(query=query) @@ -320,8 +337,9 @@ def tokenize(self, query: str) -> list[Union[FieldValue, Keyword, Identifier]]: @staticmethod def filter_tokens( - tokens: list[TOKEN_TYPE], token_type: Union[type[FieldValue], type[Field], type[Keyword], type[Identifier]] - ) -> list[TOKEN_TYPE]: + tokens: list[QUERY_TOKEN_TYPE], + token_type: Union[type[FieldValue], type[Field], type[Keyword], type[Identifier]], + ) -> list[QUERY_TOKEN_TYPE]: return [token for token in tokens if isinstance(token, token_type)] def get_field_tokens_from_func_args( # noqa: PLR0912 @@ -339,6 +357,8 @@ def get_field_tokens_from_func_args( # noqa: PLR0912 elif isinstance(arg, FieldValue): if arg.field: result.append(arg.field) + elif isinstance(arg, FunctionValue): + result.extend(self.get_field_tokens_from_func_args(args=[arg.function])) elif isinstance(arg, GroupByFunction): result.extend(self.get_field_tokens_from_func_args(args=arg.args)) result.extend(self.get_field_tokens_from_func_args(args=arg.by_clauses)) diff --git a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml index fa904aaf..ac3f8c9c 100644 --- a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml +++ b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml @@ -126,3 +126,4 @@ field_mapping: DestinationOS: xdm.target.host.os url_category: xdm.network.http.url_category EventSeverity: xdm.alert.severity + duration: xdm.event.duration diff --git a/uncoder-core/app/translator/mappings/platforms/qradar/default.yml b/uncoder-core/app/translator/mappings/platforms/qradar/default.yml index 00dcef55..1e098a77 100644 --- a/uncoder-core/app/translator/mappings/platforms/qradar/default.yml +++ b/uncoder-core/app/translator/mappings/platforms/qradar/default.yml @@ -77,4 +77,5 @@ field_mapping: EventSeverity: EventSeverity Source: - Source - - source \ No newline at end of file + - source + duration: duration \ No newline at end of file diff --git a/uncoder-core/app/translator/platforms/base/aql/functions/__init__.py b/uncoder-core/app/translator/platforms/base/aql/functions/__init__.py index 3aed1306..813ec885 100644 --- a/uncoder-core/app/translator/platforms/base/aql/functions/__init__.py +++ b/uncoder-core/app/translator/platforms/base/aql/functions/__init__.py @@ -23,9 +23,9 @@ from app.translator.core.custom_types.functions import FunctionType from app.translator.core.exceptions.functions import InvalidFunctionSignature, NotSupportedFunctionException from 
app.translator.core.functions import PlatformFunctions -from app.translator.core.models.field import Field from app.translator.core.models.functions.base import Function, ParsedFunctions from app.translator.core.models.functions.sort import SortLimitFunction +from app.translator.core.models.query_tokens.field import Field from app.translator.platforms.base.aql.const import TABLE_PATTERN from app.translator.platforms.base.aql.functions.const import ( AGGREGATION_FUNCTIONS_MAP, diff --git a/uncoder-core/app/translator/platforms/base/aql/functions/const.py b/uncoder-core/app/translator/platforms/base/aql/functions/const.py index 141820b4..e9481cc0 100644 --- a/uncoder-core/app/translator/platforms/base/aql/functions/const.py +++ b/uncoder-core/app/translator/platforms/base/aql/functions/const.py @@ -24,6 +24,7 @@ class AQLFunctionType(CustomEnum): class AQLFunctionGroupType(CustomEnum): agg = "agg" + str_conversion = "str_conversion" class AQLSortOrderType(CustomEnum): diff --git a/uncoder-core/app/translator/platforms/base/aql/parsers/aql.py b/uncoder-core/app/translator/platforms/base/aql/parsers/aql.py index f911ea27..4bc3f46a 100644 --- a/uncoder-core/app/translator/platforms/base/aql/parsers/aql.py +++ b/uncoder-core/app/translator/platforms/base/aql/parsers/aql.py @@ -27,12 +27,12 @@ from app.translator.platforms.base.aql.functions import AQLFunctions, aql_functions from app.translator.platforms.base.aql.log_source_map import LOG_SOURCE_FUNCTIONS_MAP from app.translator.platforms.base.aql.mapping import AQLMappings, aql_mappings -from app.translator.platforms.base.aql.tokenizer import AQLTokenizer, aql_tokenizer +from app.translator.platforms.base.aql.tokenizer import AQLTokenizer from app.translator.tools.utils import get_match_group class AQLQueryParser(PlatformQueryParser): - tokenizer: AQLTokenizer = aql_tokenizer + tokenizer: AQLTokenizer = AQLTokenizer(aql_functions) mappings: AQLMappings = aql_mappings platform_functions: AQLFunctions = aql_functions @@ -116,10 +116,10 @@ def _parse_query(self, text: str) -> tuple[str, dict[str, Union[list[str], list[ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer: query, log_sources, functions = self._parse_query(raw_query_container.query) - tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources) - fields_tokens = self.get_fields_tokens(tokens=tokens) - self.set_functions_fields_generic_names(functions=functions, source_mappings=source_mappings) + query_tokens = self.get_query_tokens(query) + field_tokens = self.get_field_tokens(query_tokens, functions.functions) + source_mappings = self.get_source_mappings(field_tokens, log_sources) meta_info = raw_query_container.meta_info - meta_info.query_fields = fields_tokens + meta_info.query_fields = field_tokens meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings] - return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info, functions=functions) + return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions) diff --git a/uncoder-core/app/translator/platforms/base/aql/tokenizer.py b/uncoder-core/app/translator/platforms/base/aql/tokenizer.py index 54a797eb..16aa96fe 100644 --- a/uncoder-core/app/translator/platforms/base/aql/tokenizer.py +++ b/uncoder-core/app/translator/platforms/base/aql/tokenizer.py @@ -21,11 +21,16 @@ from app.translator.core.custom_types.tokens import OperatorType from app.translator.core.custom_types.values import ValueType -from 
app.translator.core.models.field import FieldValue, Keyword -from app.translator.core.models.identifier import Identifier +from app.translator.core.functions import PlatformFunctions +from app.translator.core.models.functions.base import Function +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.function_value import FunctionValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword from app.translator.core.str_value_manager import StrValue from app.translator.core.tokenizer import QueryTokenizer from app.translator.platforms.base.aql.const import NUM_VALUE_PATTERN, SINGLE_QUOTES_VALUE_PATTERN, UTF8_PAYLOAD_PATTERN +from app.translator.platforms.base.aql.functions.const import AQLFunctionGroupType from app.translator.platforms.base.aql.str_value_manager import aql_str_value_manager from app.translator.tools.utils import get_match_group @@ -46,6 +51,7 @@ class AQLTokenizer(QueryTokenizer): multi_value_operators_map: ClassVar[dict[str, str]] = {"in": OperatorType.EQ} field_pattern = r'(?P<field_name>"[a-zA-Z\._\-\s]+"|[a-zA-Z\._\-]+)' + function_pattern = r'(?P<function_name>[a-zA-Z_]+)\((?:(?:[a-zA-Z\._\-\s]+)|(?:"[a-zA-Z\._\-]+"))\)' bool_value_pattern = rf"(?P<{ValueType.bool_value}>true|false)\s*" _value_pattern = rf"{NUM_VALUE_PATTERN}|{bool_value_pattern}|{SINGLE_QUOTES_VALUE_PATTERN}" multi_value_pattern = rf"""\((?P<{ValueType.multi_value}>[:a-zA-Z\"\*0-9=+%#\-_\/\\'\,.&^@!\(\s]*)\)""" @@ -54,6 +60,9 @@ class AQLTokenizer(QueryTokenizer): wildcard_symbol = "%" str_value_manager = aql_str_value_manager + def __init__(self, platform_functions: PlatformFunctions = None): + self.platform_functions = platform_functions + @staticmethod def should_process_value_wildcards(operator: Optional[str]) -> bool: return operator and operator.lower() in ("like", "ilike") @@ -79,7 +88,7 @@ def get_operator_and_value( return super().get_operator_and_value(match, mapped_operator, operator) def escape_field_name(self, field_name: str) -> str: - return field_name.replace('"', r"\"").replace(" ", r"\ ") + return field_name.replace('"', r"\"").replace(" ", r"\ ").replace("(", r"\(").replace(")", r"\)") @staticmethod def create_field_value(field_name: str, operator: Identifier, value: Union[str, list]) -> FieldValue: @@ -93,5 +102,39 @@ def search_keyword(self, query: str) -> tuple[Keyword, str]: pos = keyword_search.end() return keyword, query[pos:] - -aql_tokenizer = AQLTokenizer() + def _search_function_value(self, function: Function, query: str) -> tuple[FunctionValue, str]: + operator = self.search_operator(query, function.raw) + if self.is_multi_value_flow(function.raw, operator, query): + query, grouped_values = self.search_multi_value(query=query, operator=operator, field_name=function.raw) + tokens = [ # always consists of 1 element + FunctionValue(function=function, operator=Identifier(token_type=op), value=values) + for op, values in grouped_values.items() + ] + return tokens[0], query + + query, operator, value = self.search_single_value(query=query, operator=operator, field_name=function.raw) + operator_token = Identifier(token_type=operator) + return FunctionValue(function=function, operator=operator_token, value=value), query + + def search_function_value(self, query: str) -> tuple[FunctionValue, str]: + str_conversion_func_parser = self.platform_functions.manager.get_parser(AQLFunctionGroupType.str_conversion) + if str_conversion_func_parser and (func_match := 
str_conversion_func_parser.get_func_match(query)): + function = str_conversion_func_parser.parse(func_match.name, func_match.match) + return self._search_function_value(function, query) + + return super().search_function_value(query) + + def _check_function_value_match(self, query: str, white_space_pattern: str = r"\s+") -> bool: + single_value_operator_group = rf"(?:{'|'.join(self.single_value_operators_map)})" + single_value_pattern = rf"""{self.function_pattern}\s*{single_value_operator_group}\s*{self.value_pattern}\s*""" + if re.match(single_value_pattern, query, re.IGNORECASE): + return True + + if self.multi_value_operators_map: + multi_value_operator_group = rf"(?:{'|'.join(self.multi_value_operators_map)})" + pattern = f"{self.function_pattern}{white_space_pattern}{multi_value_operator_group}{white_space_pattern}" + multi_value_pattern = rf"{pattern}{self.multi_value_pattern}" + if re.match(multi_value_pattern, query, re.IGNORECASE): + return True + + return False diff --git a/uncoder-core/app/translator/platforms/base/lucene/parsers/lucene.py b/uncoder-core/app/translator/platforms/base/lucene/parsers/lucene.py index c748c1e4..5fb57284 100644 --- a/uncoder-core/app/translator/platforms/base/lucene/parsers/lucene.py +++ b/uncoder-core/app/translator/platforms/base/lucene/parsers/lucene.py @@ -47,9 +47,10 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]: def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer: query, log_sources = self._parse_query(raw_query_container.query) - tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources) - fields_tokens = self.get_fields_tokens(tokens=tokens) + query_tokens = self.get_query_tokens(query) + field_tokens = self.get_field_tokens(query_tokens) + source_mappings = self.get_source_mappings(field_tokens, log_sources) meta_info = raw_query_container.meta_info - meta_info.query_fields = fields_tokens + meta_info.query_fields = field_tokens meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings] - return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info) + return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info) diff --git a/uncoder-core/app/translator/platforms/base/lucene/tokenizer.py b/uncoder-core/app/translator/platforms/base/lucene/tokenizer.py index eb54b7ea..b56f5bee 100644 --- a/uncoder-core/app/translator/platforms/base/lucene/tokenizer.py +++ b/uncoder-core/app/translator/platforms/base/lucene/tokenizer.py @@ -19,11 +19,13 @@ import re from typing import ClassVar, Optional, Union +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.custom_types.tokens import OperatorType from app.translator.core.custom_types.values import ValueType from app.translator.core.mixins.logic import ANDLogicOperatorMixin -from app.translator.core.models.field import FieldValue, Keyword -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.identifier import Identifier +from app.translator.core.models.query_tokens.keyword import Keyword from app.translator.core.str_value_manager import StrValue from app.translator.core.tokenizer import QueryTokenizer from app.translator.platforms.base.lucene.escape_manager import lucene_escape_manager @@ -135,6 +137,6 @@ def _check_field_value_match(self, query: str, white_space_pattern: str = r"\s*" return 
super()._check_field_value_match(query, white_space_pattern=white_space_pattern) - def tokenize(self, query: str) -> list[Union[FieldValue, Keyword, Identifier]]: + def tokenize(self, query: str) -> list[QUERY_TOKEN_TYPE]: tokens = super().tokenize(query=query) return self.add_and_token_if_missed(tokens=tokens) diff --git a/uncoder-core/app/translator/platforms/base/spl/parsers/spl.py b/uncoder-core/app/translator/platforms/base/spl/parsers/spl.py index 92ba415d..27a1559d 100644 --- a/uncoder-core/app/translator/platforms/base/spl/parsers/spl.py +++ b/uncoder-core/app/translator/platforms/base/spl/parsers/spl.py @@ -67,10 +67,10 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain return self.platform_functions.parse_tstats_func(raw_query_container) query, log_sources, functions = self._parse_query(raw_query_container.query) - tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources) - fields_tokens = self.get_fields_tokens(tokens=tokens) - self.set_functions_fields_generic_names(functions=functions, source_mappings=source_mappings) + query_tokens = self.get_query_tokens(query) + field_tokens = self.get_field_tokens(query_tokens, functions.functions) + source_mappings = self.get_source_mappings(field_tokens, log_sources) meta_info = raw_query_container.meta_info - meta_info.query_fields = fields_tokens + meta_info.query_fields = field_tokens meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings] - return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info, functions=functions) + return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions) diff --git a/uncoder-core/app/translator/platforms/base/spl/tokenizer.py b/uncoder-core/app/translator/platforms/base/spl/tokenizer.py index 8a030519..57a5a695 100644 --- a/uncoder-core/app/translator/platforms/base/spl/tokenizer.py +++ b/uncoder-core/app/translator/platforms/base/spl/tokenizer.py @@ -17,13 +17,12 @@ """ import re -from typing import Any, ClassVar, Optional, Union +from typing import Any, ClassVar, Optional +from app.translator.core.const import QUERY_TOKEN_TYPE from app.translator.core.custom_types.tokens import OperatorType from app.translator.core.custom_types.values import ValueType from app.translator.core.mixins.logic import ANDLogicOperatorMixin -from app.translator.core.models.field import FieldValue, Keyword -from app.translator.core.models.identifier import Identifier from app.translator.core.tokenizer import QueryTokenizer from app.translator.platforms.base.spl.const import DOUBLE_QUOTES_VALUE_PATTERN as D_Q_V_PATTERN from app.translator.platforms.base.spl.const import FIELD_PATTERN @@ -77,6 +76,6 @@ def get_operator_and_value( return super().get_operator_and_value(match, mapped_operator, operator) - def tokenize(self, query: str) -> list[Union[FieldValue, Keyword, Identifier]]: + def tokenize(self, query: str) -> list[QUERY_TOKEN_TYPE]: tokens = super().tokenize(query=query) return self.add_and_token_if_missed(tokens=tokens) diff --git a/uncoder-core/app/translator/platforms/base/sql/parsers/sql.py b/uncoder-core/app/translator/platforms/base/sql/parsers/sql.py index d324d4ba..4a882467 100644 --- a/uncoder-core/app/translator/platforms/base/sql/parsers/sql.py +++ b/uncoder-core/app/translator/platforms/base/sql/parsers/sql.py @@ -43,9 +43,10 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, Optional[str]]]: def parse(self, raw_query_container: RawQueryContainer) -> 
TokenizedQueryContainer: query, log_sources = self._parse_query(raw_query_container.query) - tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources) - fields_tokens = self.get_fields_tokens(tokens=tokens) + query_tokens = self.get_query_tokens(query) + field_tokens = self.get_field_tokens(query_tokens) + source_mappings = self.get_source_mappings(field_tokens, log_sources) meta_info = raw_query_container.meta_info - meta_info.query_fields = fields_tokens + meta_info.query_fields = field_tokens meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings] - return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info) + return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info) diff --git a/uncoder-core/app/translator/platforms/base/sql/tokenizer.py b/uncoder-core/app/translator/platforms/base/sql/tokenizer.py index 944d3c9b..8292ca14 100644 --- a/uncoder-core/app/translator/platforms/base/sql/tokenizer.py +++ b/uncoder-core/app/translator/platforms/base/sql/tokenizer.py @@ -21,8 +21,8 @@ from app.translator.core.custom_types.tokens import OperatorType from app.translator.core.custom_types.values import ValueType -from app.translator.core.models.field import FieldValue -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.identifier import Identifier from app.translator.core.tokenizer import QueryTokenizer from app.translator.tools.utils import get_match_group diff --git a/uncoder-core/app/translator/platforms/chronicle/parsers/chronicle.py b/uncoder-core/app/translator/platforms/chronicle/parsers/chronicle.py index 8c0e8431..b36d1197 100644 --- a/uncoder-core/app/translator/platforms/chronicle/parsers/chronicle.py +++ b/uncoder-core/app/translator/platforms/chronicle/parsers/chronicle.py @@ -34,9 +34,10 @@ class ChronicleQueryParser(PlatformQueryParser): wrapped_with_comment_pattern = r"^\s*//.*(?:\n|$)" def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer: - tokens, source_mappings = self.get_tokens_and_source_mappings(raw_query_container.query, {}) - fields_tokens = self.get_fields_tokens(tokens=tokens) + query_tokens = self.get_query_tokens(raw_query_container.query) + field_tokens = self.get_field_tokens(query_tokens) + source_mappings = self.get_source_mappings(field_tokens, {}) meta_info = raw_query_container.meta_info - meta_info.query_fields = fields_tokens + meta_info.query_fields = field_tokens meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings] - return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info) + return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info) diff --git a/uncoder-core/app/translator/platforms/chronicle/tokenizer.py b/uncoder-core/app/translator/platforms/chronicle/tokenizer.py index 5278da4a..a0943952 100644 --- a/uncoder-core/app/translator/platforms/chronicle/tokenizer.py +++ b/uncoder-core/app/translator/platforms/chronicle/tokenizer.py @@ -21,8 +21,8 @@ from app.translator.core.custom_types.tokens import OperatorType from app.translator.core.custom_types.values import ValueType -from app.translator.core.models.field import FieldValue -from app.translator.core.models.identifier import Identifier +from app.translator.core.models.query_tokens.field_value import FieldValue +from app.translator.core.models.query_tokens.identifier import Identifier from 
diff --git a/uncoder-core/app/translator/platforms/chronicle/tokenizer.py b/uncoder-core/app/translator/platforms/chronicle/tokenizer.py
index 5278da4a..a0943952 100644
--- a/uncoder-core/app/translator/platforms/chronicle/tokenizer.py
+++ b/uncoder-core/app/translator/platforms/chronicle/tokenizer.py
@@ -21,8 +21,8 @@
 from app.translator.core.custom_types.tokens import OperatorType
 from app.translator.core.custom_types.values import ValueType
-from app.translator.core.models.field import FieldValue
-from app.translator.core.models.identifier import Identifier
+from app.translator.core.models.query_tokens.field_value import FieldValue
+from app.translator.core.models.query_tokens.identifier import Identifier
 from app.translator.core.tokenizer import QueryTokenizer
 from app.translator.platforms.chronicle.escape_manager import chronicle_escape_manager
 from app.translator.tools.utils import get_match_group
diff --git a/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py b/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
index 272bdfdc..0696e2ba 100644
--- a/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
+++ b/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
@@ -18,16 +18,16 @@
 from typing import Optional, Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
-from app.translator.core.const import TOKEN_TYPE
+from app.translator.core.const import QUERY_TOKEN_TYPE
 from app.translator.core.custom_types.meta_info import SeverityType
 from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType
 from app.translator.core.custom_types.values import ValueType
 from app.translator.core.exceptions.render import UnsupportedRenderMethod
 from app.translator.core.mapping import SourceMapping
-from app.translator.core.models.field import FieldValue
-from app.translator.core.models.identifier import Identifier
 from app.translator.core.models.platform_details import PlatformDetails
 from app.translator.core.models.query_container import MetaInfoContainer, TokenizedQueryContainer
+from app.translator.core.models.query_tokens.field_value import FieldValue
+from app.translator.core.models.query_tokens.identifier import Identifier
 from app.translator.core.render import BaseFieldValueRender, PlatformQueryRender
 from app.translator.core.str_value_manager import StrValue
 from app.translator.managers import render_manager
@@ -196,7 +196,7 @@ class FortiSiemRuleRender(PlatformQueryRender):
     field_value_render = FortiSiemFieldValueRender(or_token=or_token)
 
     @staticmethod
-    def __is_negated_token(prev_token: TOKEN_TYPE) -> bool:
+    def __is_negated_token(prev_token: QUERY_TOKEN_TYPE) -> bool:
         return isinstance(prev_token, Identifier) and prev_token.token_type == LogicalOperatorType.NOT
 
     @staticmethod
@@ -207,7 +207,7 @@ def __should_negate(is_negated_token: bool = False, negation_ctx: bool = False)
         return is_negated_token or negation_ctx
 
     @staticmethod
-    def __negate_token(token: TOKEN_TYPE) -> None:
+    def __negate_token(token: QUERY_TOKEN_TYPE) -> None:
         if isinstance(token, Identifier):
             if token.token_type == LogicalOperatorType.AND:
                 token.token_type = LogicalOperatorType.OR
@@ -217,7 +217,7 @@ def __negate_token(token: TOKEN_TYPE) -> None:
             token_type = token.operator.token_type
             token.operator.token_type = _NOT_OPERATORS_MAP_SUBSTITUTES.get(token_type) or token_type
 
-    def __replace_not_tokens(self, tokens: list[TOKEN_TYPE]) -> list[TOKEN_TYPE]:
+    def __replace_not_tokens(self, tokens: list[QUERY_TOKEN_TYPE]) -> list[QUERY_TOKEN_TYPE]:
         not_token_indices = []
         negation_ctx_stack = []
         for index, token in enumerate(tokens[1:], start=1):
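`__replace_not_tokens` folds `NOT` into the tokens it governs: `__negate_token` swaps AND and OR identifiers and replaces each field operator with its counterpart from `_NOT_OPERATORS_MAP_SUBSTITUTES`. A self-contained sketch of the core rewrite on a flat token list; the substitution table here is illustrative, not the render's actual map:

# De Morgan-style rewrite: NOT (a == 1 AND b == 2)  ->  a != 1 OR b != 2
NOT_OPERATOR_SUBSTITUTES = {"==": "!=", "!=": "==", "AND": "OR", "OR": "AND"}


def negate_tokens(tokens: list[str]) -> list[str]:
    # Swap every operator for its negated counterpart; leave operands as-is.
    return [NOT_OPERATOR_SUBSTITUTES.get(token, token) for token in tokens]


print(negate_tokens(["a", "==", "1", "AND", "b", "==", "2"]))
# ['a', '!=', '1', 'OR', 'b', '!=', '2']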
diff --git a/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py b/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
index 7d3493c8..a38b8a64 100644
--- a/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
+++ b/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
@@ -20,15 +20,15 @@
 from typing import Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
+from app.translator.core.const import QUERY_TOKEN_TYPE
 from app.translator.core.custom_types.tokens import LogicalOperatorType
 from app.translator.core.custom_types.values import ValueType
 from app.translator.core.exceptions.core import StrictPlatformException
 from app.translator.core.exceptions.render import BaseRenderException
 from app.translator.core.mapping import LogSourceSignature, SourceMapping
-from app.translator.core.models.field import FieldValue, Keyword
-from app.translator.core.models.identifier import Identifier
 from app.translator.core.models.platform_details import PlatformDetails
 from app.translator.core.models.query_container import TokenizedQueryContainer
+from app.translator.core.models.query_tokens.field_value import FieldValue
 from app.translator.core.render import BaseFieldValueRender, PlatformQueryRender
 from app.translator.managers import render_manager
 from app.translator.platforms.logrhythm_axon.const import UNMAPPED_FIELD_DEFAULT_NAME, logrhythm_axon_query_details
@@ -217,7 +217,7 @@ def _finalize_search_query(query: str) -> str:
     def generate_prefix(self, log_source_signature: LogSourceSignature, functions_prefix: str = "") -> str:  # noqa: ARG002
         return str(log_source_signature)
 
-    def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapping: SourceMapping) -> str:
+    def apply_token(self, token: QUERY_TOKEN_TYPE, source_mapping: SourceMapping) -> str:
         if isinstance(token, FieldValue) and token.field:
             try:
                 mapped_fields = self.map_field(token.field, source_mapping)
diff --git a/uncoder-core/app/translator/platforms/logscale/parsers/logscale.py b/uncoder-core/app/translator/platforms/logscale/parsers/logscale.py
index e1015ff2..668796ae 100644
--- a/uncoder-core/app/translator/platforms/logscale/parsers/logscale.py
+++ b/uncoder-core/app/translator/platforms/logscale/parsers/logscale.py
@@ -42,10 +42,10 @@ def _parse_query(self, query: str) -> tuple[str, ParsedFunctions]:
 
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, functions = self._parse_query(query=raw_query_container.query)
-        tokens, source_mappings = self.get_tokens_and_source_mappings(query, {})
-        fields_tokens = self.get_fields_tokens(tokens=tokens)
-        self.set_functions_fields_generic_names(functions=functions, source_mappings=source_mappings)
+        query_tokens = self.get_query_tokens(query)
+        field_tokens = self.get_field_tokens(query_tokens, functions.functions)
+        source_mappings = self.get_source_mappings(field_tokens, {})
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = fields_tokens
+        meta_info.query_fields = field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
-        return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info, functions=functions)
+        return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
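In the LogRhythm render above, `apply_token` wraps `map_field` in a `try` block; judging by the `StrictPlatformException` and `UNMAPPED_FIELD_DEFAULT_NAME` imports, unmappable fields fall back to a default name rather than failing the whole render. A hedged sketch of that guard pattern with stand-in names and values:

# Illustrative fallback pattern; the real map_field consults SourceMapping.
UNMAPPED_FIELD_DEFAULT_NAME = "unattributed"  # stand-in value


class StrictPlatformException(Exception):
    pass


def map_field(field_name: str, mapping: dict[str, str]) -> list[str]:
    if field_name not in mapping:
        raise StrictPlatformException(field_name)
    return [mapping[field_name]]


def resolve_field(field_name: str, mapping: dict[str, str]) -> list[str]:
    try:
        return map_field(field_name, mapping)
    except StrictPlatformException:
        return [UNMAPPED_FIELD_DEFAULT_NAME]  # degrade instead of raising


print(resolve_field("EventID", {"EventID": "event.code"}))  # ['event.code']
print(resolve_field("Image", {}))  # ['unattributed']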
diff --git a/uncoder-core/app/translator/platforms/logscale/tokenizer.py b/uncoder-core/app/translator/platforms/logscale/tokenizer.py
index c765c8a9..9c7c33e5 100644
--- a/uncoder-core/app/translator/platforms/logscale/tokenizer.py
+++ b/uncoder-core/app/translator/platforms/logscale/tokenizer.py
@@ -17,13 +17,13 @@
 """
 import re
-from typing import Any, ClassVar, Optional, Union
+from typing import Any, ClassVar, Optional
 
+from app.translator.core.const import QUERY_TOKEN_TYPE
 from app.translator.core.custom_types.tokens import LogicalOperatorType, OperatorType
 from app.translator.core.custom_types.values import ValueType
 from app.translator.core.mixins.logic import ANDLogicOperatorMixin
-from app.translator.core.models.field import FieldValue, Keyword
-from app.translator.core.models.identifier import Identifier
+from app.translator.core.models.query_tokens.identifier import Identifier
 from app.translator.core.tokenizer import QueryTokenizer
 from app.translator.platforms.logscale.escape_manager import logscale_escape_manager
 from app.translator.tools.utils import get_match_group
@@ -71,6 +71,6 @@ def _get_next_token(self, query: str) -> (list, str):
         return super()._get_next_token(query)
 
-    def tokenize(self, query: str) -> list[Union[FieldValue, Keyword, Identifier]]:
+    def tokenize(self, query: str) -> list[QUERY_TOKEN_TYPE]:
         tokens = super().tokenize(query=query)
         return self.add_and_token_if_missed(tokens=tokens)
diff --git a/uncoder-core/app/translator/platforms/microsoft/parsers/microsoft_sentinel.py b/uncoder-core/app/translator/platforms/microsoft/parsers/microsoft_sentinel.py
index 507c8c17..2325367f 100644
--- a/uncoder-core/app/translator/platforms/microsoft/parsers/microsoft_sentinel.py
+++ b/uncoder-core/app/translator/platforms/microsoft/parsers/microsoft_sentinel.py
@@ -43,10 +43,10 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]], ParsedFun
 
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, log_sources, functions = self._parse_query(query=raw_query_container.query)
-        tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources)
-        fields_tokens = self.get_fields_tokens(tokens=tokens)
-        self.set_functions_fields_generic_names(functions=functions, source_mappings=source_mappings)
+        query_tokens = self.get_query_tokens(query)
+        field_tokens = self.get_field_tokens(query_tokens, functions.functions)
+        source_mappings = self.get_source_mappings(field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = fields_tokens
+        meta_info.query_fields = field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
-        return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info, functions=functions)
+        return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
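The SPL and LogScale tokenizers both end `tokenize` with `add_and_token_if_missed` from `ANDLogicOperatorMixin`. Presumably it makes implicit conjunction explicit, inserting an AND identifier between adjacent conditions that the source language lets sit side by side; a standalone sketch under that assumption:

# Assumed behavior sketch: insert "AND" between adjacent condition tokens
# when the query language treats bare juxtaposition as conjunction.
def add_and_token_if_missed(tokens: list[str]) -> list[str]:
    logical = {"AND", "OR", "NOT"}
    result: list[str] = []
    for token in tokens:
        if result and result[-1] not in logical and token not in logical:
            result.append("AND")
        result.append(token)
    return result


# 'index=main error' carries no joining operator between its two conditions:
print(add_and_token_if_missed(["index=main", "error"]))
# ['index=main', 'AND', 'error']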
diff --git a/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py b/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py
index 09cd5b62..c5c67ed4 100644
--- a/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py
+++ b/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py
@@ -21,12 +21,12 @@
 import json
 from typing import Optional, Union
 
+from app.translator.core.const import QUERY_TOKEN_TYPE
 from app.translator.core.custom_types.meta_info import SeverityType
 from app.translator.core.mapping import SourceMapping
-from app.translator.core.models.field import FieldValue, Keyword
-from app.translator.core.models.identifier import Identifier
 from app.translator.core.models.platform_details import PlatformDetails
 from app.translator.core.models.query_container import MetaInfoContainer, RawQueryContainer, TokenizedQueryContainer
+from app.translator.core.models.query_tokens.field_value import FieldValue
 from app.translator.managers import render_manager
 from app.translator.platforms.opensearch.const import OPENSEARCH_RULE, opensearch_rule_details
 from app.translator.platforms.opensearch.mapping import OpenSearchMappings, opensearch_mappings
@@ -78,7 +78,7 @@ def finalize_query(
         rule_str = json.dumps(rule, indent=4, sort_keys=False)
         return self.wrap_with_not_supported_functions(rule_str, not_supported_functions)
 
-    def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapping: SourceMapping) -> str:
+    def apply_token(self, token: QUERY_TOKEN_TYPE, source_mapping: SourceMapping) -> str:
         if isinstance(token, FieldValue) and token.field:
             for field in self.map_field(token.field, source_mapping):
                 self.fields.update({field: f"{{ctx.results.0.hits.hits.0._source.{field}}}"})
diff --git a/uncoder-core/app/translator/platforms/palo_alto/functions/const.py b/uncoder-core/app/translator/platforms/palo_alto/functions/const.py
index 8009a40e..95bb3982 100644
--- a/uncoder-core/app/translator/platforms/palo_alto/functions/const.py
+++ b/uncoder-core/app/translator/platforms/palo_alto/functions/const.py
@@ -31,8 +31,6 @@ class CortexXQLFunctionType(CustomEnum):
     timeframe = "timeframe"
     union = "union"
 
-    compare = "compare"
-
 
 class XqlSortOrderType(CustomEnum):
     asc = "asc"
diff --git a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
index 708700f6..f2a74bc8 100644
--- a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
+++ b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
@@ -20,15 +20,15 @@
 from typing import ClassVar, Optional, Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
+from app.translator.core.const import QUERY_TOKEN_TYPE
 from app.translator.core.context_vars import preset_log_source_str_ctx_var, return_only_first_query_ctx_var
 from app.translator.core.custom_types.tokens import OperatorType
 from app.translator.core.custom_types.values import ValueType
 from app.translator.core.exceptions.core import StrictPlatformException
 from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping
-from app.translator.core.models.field import FieldValue, Keyword
-from app.translator.core.models.identifier import Identifier
 from app.translator.core.models.platform_details import PlatformDetails
 from app.translator.core.models.query_container import TokenizedQueryContainer
+from app.translator.core.models.query_tokens.field_value import FieldValue
 from app.translator.core.render import BaseFieldFieldRender, BaseFieldValueRender, PlatformQueryRender
 from app.translator.core.str_value_manager import StrValue
 from app.translator.managers import render_manager
@@ -208,7 +208,7 @@ def generate_prefix(self, log_source_signature: CortexXQLLogSourceSignature, fun
         log_source_str = preset_log_source_str_ctx_var.get() or str(log_source_signature)
         return f"{functions_prefix}{log_source_str}"
 
-    def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapping: SourceMapping) -> str:
+    def apply_token(self, token: QUERY_TOKEN_TYPE, source_mapping: SourceMapping) -> str:
         if isinstance(token, FieldValue) and token.field:
             field_name = token.field.source_name
             if values_map := SOURCE_MAPPING_TO_FIELD_VALUE_MAP.get(source_mapping.source_id, {}).get(field_name):
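All three `apply_token` overrides touched here (LogRhythm, OpenSearch, Cortex XSIAM) now accept the widened `QUERY_TOKEN_TYPE` union and branch on `isinstance(token, FieldValue)` before doing field-specific work. A minimal sketch of that dispatch shape with stand-in types (the project's union is wider; this sketch keeps only two members):

from dataclasses import dataclass
from typing import Union


@dataclass
class FieldValue:
    field: str
    value: str


@dataclass
class Identifier:
    token_type: str  # e.g. "AND", "OR", "NOT"


QUERY_TOKEN_TYPE = Union[FieldValue, Identifier]  # trimmed-down stand-in


def apply_token(token: QUERY_TOKEN_TYPE) -> str:
    if isinstance(token, FieldValue) and token.field:
        return f"{token.field} = {token.value!r}"  # field-specific rendering
    if isinstance(token, Identifier):
        return token.token_type
    raise NotImplementedError(type(token).__name__)


tokens = [FieldValue("EventID", "4688"), Identifier("AND"), FieldValue("Image", "cmd.exe")]
print(" ".join(apply_token(t) for t in tokens))
# EventID = '4688' AND Image = 'cmd.exe'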
diff --git a/uncoder-core/app/translator/platforms/sigma/models/compiler.py b/uncoder-core/app/translator/platforms/sigma/models/compiler.py
index 2c0b6472..c6092498 100644
--- a/uncoder-core/app/translator/platforms/sigma/models/compiler.py
+++ b/uncoder-core/app/translator/platforms/sigma/models/compiler.py
@@ -19,8 +19,9 @@
 from typing import Union
 
 from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType
-from app.translator.core.models.field import FieldValue, Keyword
-from app.translator.core.models.identifier import Identifier
+from app.translator.core.models.query_tokens.field_value import FieldValue
+from app.translator.core.models.query_tokens.keyword import Keyword
+from app.translator.core.models.query_tokens.identifier import Identifier
 from app.translator.platforms.sigma.models.group import Group
 from app.translator.platforms.sigma.models.operator import NOT, Operator
diff --git a/uncoder-core/app/translator/platforms/sigma/models/modifiers.py b/uncoder-core/app/translator/platforms/sigma/models/modifiers.py
index 446eb310..fa98c8ce 100644
--- a/uncoder-core/app/translator/platforms/sigma/models/modifiers.py
+++ b/uncoder-core/app/translator/platforms/sigma/models/modifiers.py
@@ -1,8 +1,8 @@
 from typing import ClassVar, Optional, Union
 
 from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType
-from app.translator.core.models.field import FieldValue
-from app.translator.core.models.identifier import Identifier
+from app.translator.core.models.query_tokens.field_value import FieldValue
+from app.translator.core.models.query_tokens.identifier import Identifier
 from app.translator.core.str_value_manager import StrValue
 from app.translator.platforms.sigma.str_value_manager import sigma_str_value_manager
diff --git a/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py b/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py
index 9f2fd7ab..5dd16651 100644
--- a/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py
+++ b/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py
@@ -21,7 +21,8 @@
 from app.translator.core.exceptions.core import SigmaRuleValidationException
 from app.translator.core.mixins.rule import YamlRuleMixin
-from app.translator.core.models.field import Field, FieldValue
+from app.translator.core.models.query_tokens.field import Field
+from app.translator.core.models.query_tokens.field_value import FieldValue
 from app.translator.core.models.platform_details import PlatformDetails
 from app.translator.core.models.query_container import MetaInfoContainer, RawQueryContainer, TokenizedQueryContainer
 from app.translator.core.parser import QueryParser
diff --git a/uncoder-core/app/translator/platforms/sigma/renders/sigma.py b/uncoder-core/app/translator/platforms/sigma/renders/sigma.py
index 856fd4a3..9eaae45c 100644
--- a/uncoder-core/app/translator/platforms/sigma/renders/sigma.py
+++ b/uncoder-core/app/translator/platforms/sigma/renders/sigma.py
@@ -24,7 +24,8 @@
 from app.translator.core.custom_types.meta_info import SeverityType
 from app.translator.core.custom_types.tokens import OperatorType
 from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping
-from app.translator.core.models.field import FieldValue, Keyword
+from app.translator.core.models.query_tokens.field_value import FieldValue
+from app.translator.core.models.query_tokens.keyword import Keyword
 from app.translator.core.models.platform_details import PlatformDetails
 from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer
 from app.translator.core.render import QueryRender
diff --git a/uncoder-core/app/translator/platforms/sigma/tokenizer.py b/uncoder-core/app/translator/platforms/sigma/tokenizer.py
index 0893588f..faa0970a 100644
--- a/uncoder-core/app/translator/platforms/sigma/tokenizer.py
+++ b/uncoder-core/app/translator/platforms/sigma/tokenizer.py
@@ -21,8 +21,9 @@
 from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType
 from app.translator.core.exceptions.parser import TokenizerGeneralException
-from app.translator.core.models.field import FieldValue, Keyword
-from app.translator.core.models.identifier import Identifier
+from app.translator.core.models.query_tokens.field_value import FieldValue
+from app.translator.core.models.query_tokens.keyword import Keyword
+from app.translator.core.models.query_tokens.identifier import Identifier
 from app.translator.platforms.sigma.models.modifiers import ModifierManager
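Taken together, every parser in this diff now ends with the same epilogue: field tokens feed `meta_info.query_fields`, mapping ids feed `meta_info.source_mapping_ids`, and the full token list goes into the `TokenizedQueryContainer`. A condensed sketch with stand-in container types:

# Stand-in containers; the real ones live in core/models/query_container.py.
from dataclasses import dataclass, field


@dataclass
class MetaInfo:
    query_fields: list = field(default_factory=list)
    source_mapping_ids: list = field(default_factory=list)


@dataclass
class TokenizedQueryContainer:
    tokens: list
    meta_info: MetaInfo


def finish_parse(query_tokens: list, field_tokens: list, source_mapping_ids: list) -> TokenizedQueryContainer:
    # Shared epilogue of the SPL/SQL/Chronicle/LogScale/Sentinel parsers
    # (simplified: the real source mappings expose .source_id).
    meta_info = MetaInfo()
    meta_info.query_fields = field_tokens
    meta_info.source_mapping_ids = source_mapping_ids
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)


container = finish_parse(["EventID=4688"], ["EventID"], ["windows_security"])
print(container.meta_info.source_mapping_ids)  # ['windows_security']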