separate field tokens #208

Merged · 1 commit · Dec 2, 2024
4 changes: 4 additions & 0 deletions uncoder-core/app/translator/core/functions.py
@@ -164,6 +164,10 @@ def order_to_render(self) -> dict[str, int]:

         return {}

+    @property
+    def supported_render_names(self) -> set[str]:
+        return set(self._renders_map)
+

 class PlatformFunctions:
     dir_path: str = None
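
Note: a minimal sketch of what the new supported_render_names property yields, assuming _renders_map is a dict keyed by function render name (set() over a dict collects its keys); the manager class and map contents below are hypothetical stand-ins, not the real API:

class DemoFunctionsManager:
    def __init__(self) -> None:
        # hypothetical render registry: function name -> render instance
        self._renders_map = {"stats": object(), "rename": object()}

    @property
    def supported_render_names(self) -> set[str]:
        return set(self._renders_map)

assert DemoFunctionsManager().supported_render_names == {"stats", "rename"}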
28 changes: 22 additions & 6 deletions uncoder-core/app/translator/core/mapping.py
@@ -188,13 +188,22 @@ def get_source_mapping(self, source_id: str) -> Optional[SourceMapping]:
     def default_mapping(self) -> SourceMapping:
         return self._source_mappings[DEFAULT_MAPPING_NAME]

-    def check_fields_mapping_existence(self, field_tokens: list[Field], source_mapping: SourceMapping) -> list[str]:
+    def check_fields_mapping_existence(
+        self,
+        query_field_tokens: list[Field],
+        function_field_tokens_map: dict[str, list[Field]],
+        supported_func_render_names: set[str],
+        source_mapping: SourceMapping,
+    ) -> list[str]:
         unmapped = []
-        for field in field_tokens:
-            generic_field_name = field.get_generic_field_name(source_mapping.source_id)
-            mapped_field = source_mapping.fields_mapping.get_platform_field_name(generic_field_name=generic_field_name)
-            if not mapped_field and field.source_name not in unmapped:
-                unmapped.append(field.source_name)
+        for field in query_field_tokens:
+            self._check_field_mapping_existence(field, source_mapping, unmapped)
+
+        for func_name, function_field_tokens in function_field_tokens_map.items():
+            if func_name in supported_func_render_names:
+                for field in function_field_tokens:
+                    self._check_field_mapping_existence(field, source_mapping, unmapped)

         if self.is_strict_mapping and unmapped:
             raise StrictPlatformException(
@@ -203,6 +212,13 @@ def check_fields_mapping_existence(self, field_tokens: list[Field], source_mappi

         return unmapped

+    @staticmethod
+    def _check_field_mapping_existence(field: Field, source_mapping: SourceMapping, unmapped: list[str]) -> None:
+        generic_field_name = field.get_generic_field_name(source_mapping.source_id)
+        mapped_field = source_mapping.fields_mapping.get_platform_field_name(generic_field_name=generic_field_name)
+        if not mapped_field and field.source_name not in unmapped:
+            unmapped.append(field.source_name)
+
     @staticmethod
     def map_field(field: Field, source_mapping: SourceMapping) -> list[str]:
         generic_field_name = field.get_generic_field_name(source_mapping.source_id)
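
For reviewers, a simplified runnable sketch of the new checking order, with plain strings standing in for Field and SourceMapping objects (hypothetical data, not the real API): query fields are always validated, while function fields are validated only for functions the target platform can render.

def check_fields(query_fields, func_fields_map, supported_render_names, known):
    unmapped = []
    for field in query_fields:  # query fields: always checked
        if field not in known and field not in unmapped:
            unmapped.append(field)
    for func_name, fields in func_fields_map.items():
        if func_name in supported_render_names:  # skip unrenderable functions
            for field in fields:
                if field not in known and field not in unmapped:
                    unmapped.append(field)
    return unmapped

# "rename" is skipped because it is not among the supported render names.
assert check_fields(["src_ip"], {"stats": ["cnt"], "rename": ["x"]}, {"stats"}, {"src_ip"}) == ["cnt"]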
4 changes: 4 additions & 0 deletions uncoder-core/app/translator/core/models/query_container.py
@@ -65,6 +65,8 @@ def __init__(
         date: Optional[str] = None,
         output_table_fields: Optional[list[Field]] = None,
         query_fields: Optional[list[Field]] = None,
+        function_fields: Optional[list[Field]] = None,
+        function_fields_map: Optional[dict[str, list[Field]]] = None,
         license_: Optional[str] = None,
         severity: Optional[str] = None,
         references: Optional[list[str]] = None,
@@ -90,6 +92,8 @@ def __init__(
         self.date = date or datetime.now().date().strftime("%Y-%m-%d")
         self.output_table_fields = output_table_fields or []
         self.query_fields = query_fields or []
+        self.function_fields = function_fields or []
+        self.function_fields_map = function_fields_map or {}
         self.license = license_ or "DRL 1.1"
         self.severity = severity or SeverityType.low
         self.references = references or []
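
A small runnable sketch of the defaulting pattern used for the two new attributes (the class name here is a stand-in for the real meta-info container): passing None or omitting the argument yields a fresh empty container per instance.

class DemoMetaInfo:
    def __init__(self, function_fields=None, function_fields_map=None):
        self.function_fields = function_fields or []
        self.function_fields_map = function_fields_map or {}

a, b = DemoMetaInfo(), DemoMetaInfo()
assert a.function_fields is not b.function_fields  # no shared mutable default
assert b.function_fields_map == {}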
15 changes: 9 additions & 6 deletions uncoder-core/app/translator/core/parser.py
@@ -65,16 +65,19 @@ def get_query_tokens(self, query: str) -> list[QUERY_TOKEN_TYPE]:
     @staticmethod
     def get_field_tokens(
         query_tokens: list[QUERY_TOKEN_TYPE], functions: Optional[list[Function]] = None
-    ) -> list[Field]:
-        field_tokens = []
+    ) -> tuple[list[Field], list[Field], dict[str, list[Field]]]:
+        query_field_tokens = []
+        function_field_tokens = []
+        function_field_tokens_map = {}
         for token in query_tokens:
             if isinstance(token, (FieldField, FieldValue, FunctionValue)):
-                field_tokens.extend(token.fields)
+                query_field_tokens.extend(token.fields)

-        if functions:
-            field_tokens.extend([field for func in functions for field in func.fields])
+        for func in functions or []:
+            function_field_tokens.extend(func.fields)
+            function_field_tokens_map[func.name] = func.fields

-        return field_tokens
+        return query_field_tokens, function_field_tokens, function_field_tokens_map

     def get_source_mappings(
         self, field_tokens: list[Field], log_sources: dict[str, list[Union[int, str]]]
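
A simplified runnable sketch of the new three-part return contract (token and function types are hypothetical dataclass stand-ins; the real method also filters token types with isinstance):

from dataclasses import dataclass

@dataclass
class Tok:
    fields: list  # field names carried by a query token

@dataclass
class Func:
    name: str
    fields: list

def get_field_tokens(query_tokens, functions=None):
    query_field_tokens, function_field_tokens, function_field_tokens_map = [], [], {}
    for token in query_tokens:
        query_field_tokens.extend(token.fields)
    for func in functions or []:
        function_field_tokens.extend(func.fields)
        function_field_tokens_map[func.name] = func.fields
    return query_field_tokens, function_field_tokens, function_field_tokens_map

q, f, m = get_field_tokens([Tok(["src_ip"])], [Func("stats", ["dst_port"])])
assert (q, f, m) == (["src_ip"], ["dst_port"], {"stats": ["dst_port"]})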
8 changes: 6 additions & 2 deletions uncoder-core/app/translator/core/render.py
@@ -428,14 +428,18 @@ def _generate_from_tokenized_query_container_by_source_mapping(
         self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
     ) -> str:
         unmapped_fields = self.mappings.check_fields_mapping_existence(
-            query_container.meta_info.query_fields, source_mapping
+            query_container.meta_info.query_fields,
+            query_container.meta_info.function_fields_map,
+            self.platform_functions.manager.supported_render_names,
+            source_mapping,
         )
         rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
         prefix = self.generate_prefix(source_mapping.log_source_signature, rendered_functions.rendered_prefix)

         if source_mapping.raw_log_fields:
             defined_raw_log_fields = self.generate_raw_log_fields(
-                fields=query_container.meta_info.query_fields, source_mapping=source_mapping
+                fields=query_container.meta_info.query_fields + query_container.meta_info.function_fields,
+                source_mapping=source_mapping,
             )
             prefix += f"\n{defined_raw_log_fields}"
         query = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
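
Sketch of the raw-log-fields change: when a source mapping defines raw log fields, the render now derives them from query fields plus function fields. The generator below is a hypothetical stand-in for generate_raw_log_fields, with an assumed output format:

def generate_raw_log_fields(fields: list[str]) -> str:
    # hypothetical output format; the real render is platform-specific
    return "\n".join(f"define {name}" for name in dict.fromkeys(fields))

query_fields, function_fields = ["src_ip"], ["dst_port"]
print(generate_raw_log_fields(query_fields + function_fields))
# define src_ip
# define dst_port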
10 changes: 7 additions & 3 deletions uncoder-core/app/translator/platforms/base/aql/parsers/aql.py
@@ -115,9 +115,13 @@ def _parse_query(self, text: str) -> tuple[str, dict[str, Union[list[str], list[
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, log_sources, functions = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens, functions.functions)
-        source_mappings = self.get_source_mappings(field_tokens, log_sources)
+        query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
+            query_tokens, functions.functions
+        )
+        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
+        meta_info.function_fields = function_field_tokens
+        meta_info.function_fields_map = function_field_tokens_map
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
@@ -48,9 +48,9 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, log_sources = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(field_tokens, log_sources)
+        query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
+        source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
@@ -26,7 +26,8 @@ def parse(self, query: str) -> tuple[str, ParsedFunctions]:
         functions = query.split(self.function_delimiter)
         result_query = self.prepare_query(functions[0])
         for func in functions[1:]:
-            split_func = func.strip().split(" ")
+            func = func.strip()
+            split_func = func.split(" ")
             func_name, func_body = split_func[0], " ".join(split_func[1:])
             try:
                 func_parser = self.manager.get_hof_parser(func_name)
14 changes: 9 additions & 5 deletions uncoder-core/app/translator/platforms/base/spl/parsers/spl.py
@@ -29,7 +29,7 @@

 class SplQueryParser(PlatformQueryParser):
     log_source_pattern = r"^___source_type___\s*=\s*(?:\"(?P<d_q_value>[%a-zA-Z_*:0-9\-/]+)\"|(?P<value>[%a-zA-Z_*:0-9\-/]+))(?:\s+(?:and|or)\s+|\s+)?"  # noqa: E501
-    rule_name_pattern = r"`(?P<name>(?:[:a-zA-Z*0-9=+%#\-_/,;`?~‘\'.<>$&^@!\]\[()\s])*)`"
+    rule_name_pattern = r"`(?P<name>(?:[:a-zA-Z*0-9=+%#\-_/,;`?~‘\'.<>$&^@!\]\[()\s])*)`"  # noqa: RUF001
     log_source_key_types = ("index", "source", "sourcetype", "sourcecategory")

     platform_functions: SplFunctions = None
@@ -56,7 +56,7 @@ def _parse_log_sources(self, query: str) -> tuple[dict[str, list[str]], str]:
     def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]], ParsedFunctions]:
         if re.match(self.rule_name_pattern, query):
             search = re.search(self.rule_name_pattern, query, flags=re.IGNORECASE)
-            query = query[:search.start()] + query[search.end():]
+            query = query[: search.start()] + query[search.end() :]
             query = query.strip()
         log_sources, query = self._parse_log_sources(query)
         query, functions = self.platform_functions.parse(query)
@@ -72,9 +72,13 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain

         query, log_sources, functions = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens, functions.functions)
-        source_mappings = self.get_source_mappings(field_tokens, log_sources)
+        query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
+            query_tokens, functions.functions
+        )
+        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
+        meta_info.function_fields = function_field_tokens
+        meta_info.function_fields_map = function_field_tokens_map
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
6 changes: 3 additions & 3 deletions uncoder-core/app/translator/platforms/base/sql/parsers/sql.py
@@ -43,9 +43,9 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, log_sources = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(field_tokens, log_sources)
+        query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
+        source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
@@ -35,9 +35,9 @@ class ChronicleQueryParser(PlatformQueryParser):

     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query_tokens = self.get_query_tokens(raw_query_container.query)
-        field_tokens = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(field_tokens, {})
+        query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
+        source_mappings = self.get_source_mappings(query_field_tokens, {})
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
@@ -29,9 +29,9 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, log_sources = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(field_tokens, log_sources)
+        query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
+        source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
@@ -232,7 +232,10 @@ def _generate_from_tokenized_query_container_by_source_mapping(
         self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
     ) -> str:
         unmapped_fields = self.mappings.check_fields_mapping_existence(
-            query_container.meta_info.query_fields, source_mapping
+            query_container.meta_info.query_fields,
+            query_container.meta_info.function_fields_map,
+            self.platform_functions.manager.supported_render_names,
+            source_mapping,
         )
         is_event_type_set = False
         field_values = [token for token in query_container.tokens if isinstance(token, FieldValue)]
@@ -244,7 +244,10 @@ def _generate_from_tokenized_query_container_by_source_mapping(
         self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
     ) -> str:
         unmapped_fields = self.mappings.check_fields_mapping_existence(
-            query_container.meta_info.query_fields, source_mapping
+            query_container.meta_info.query_fields,
+            query_container.meta_info.function_fields_map,
+            self.platform_functions.manager.supported_render_names,
+            source_mapping,
         )
         prefix = self.generate_prefix(source_mapping.log_source_signature)
         if "product" in query_container.meta_info.parsed_logsources:
@@ -43,9 +43,13 @@ def _parse_query(self, query: str) -> tuple[str, ParsedFunctions]:
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, functions = self._parse_query(query=raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens, functions.functions)
-        source_mappings = self.get_source_mappings(field_tokens, {})
+        query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
+            query_tokens, functions.functions
+        )
+        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, {})
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
+        meta_info.function_fields = function_field_tokens
+        meta_info.function_fields_map = function_field_tokens_map
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
@@ -22,7 +22,8 @@ def parse(self, query: str) -> tuple[str, str, ParsedFunctions]:
         table = split_query[0].strip()
         query_parts = []
         for func in split_query[1:]:
-            split_func = func.strip(" ").split(" ")
+            func = func.strip()
+            split_func = func.split(" ")
             func_name, func_body = split_func[0], " ".join(split_func[1:])
             if func_name == KQLFunctionType.where:
                 query_parts.append(func_body)
@@ -44,9 +44,13 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]], ParsedFun
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query, log_sources, functions = self._parse_query(query=raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
-        field_tokens = self.get_field_tokens(query_tokens, functions.functions)
-        source_mappings = self.get_source_mappings(field_tokens, log_sources)
+        query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
+            query_tokens, functions.functions
+        )
+        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
         meta_info = raw_query_container.meta_info
-        meta_info.query_fields = field_tokens
+        meta_info.query_fields = query_field_tokens
+        meta_info.function_fields = function_field_tokens
+        meta_info.function_fields_map = function_field_tokens_map
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
         return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)