
Commit 428f231

Merge pull request #168 from UncoderIO/gis-8141
Gis 8141

2 parents 4b54f66 + 59cf1e7

4 files changed: 110 additions, 88 deletions

uncoder-core/app/translator/core/render.py
28 additions, 20 deletions
@@ -412,37 +412,45 @@ def generate_raw_log_fields(self, fields: list[Field], source_mapping: SourceMap
             defined_raw_log_fields.append(prefix)
         return "\n".join(defined_raw_log_fields)
 
+    def _generate_from_tokenized_query_container_by_source_mapping(
+        self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
+    ) -> str:
+        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
+        prefix = self.generate_prefix(source_mapping.log_source_signature, rendered_functions.rendered_prefix)
+
+        if source_mapping.raw_log_fields:
+            defined_raw_log_fields = self.generate_raw_log_fields(
+                fields=query_container.meta_info.query_fields, source_mapping=source_mapping
+            )
+            prefix += f"\n{defined_raw_log_fields}"
+        query = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
+        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
+        return self.finalize_query(
+            prefix=prefix,
+            query=query,
+            functions=rendered_functions.rendered,
+            not_supported_functions=not_supported_functions,
+            meta_info=query_container.meta_info,
+            source_mapping=source_mapping,
+        )
+
     def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
         queries_map = {}
         errors = []
         source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
 
         for source_mapping in source_mappings:
-            rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
-            prefix = self.generate_prefix(source_mapping.log_source_signature, rendered_functions.rendered_prefix)
             try:
-                if source_mapping.raw_log_fields:
-                    defined_raw_log_fields = self.generate_raw_log_fields(
-                        fields=query_container.meta_info.query_fields, source_mapping=source_mapping
-                    )
-                    prefix += f"\n{defined_raw_log_fields}"
-                result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
-            except StrictPlatformException as err:
-                errors.append(err)
-                continue
-            else:
-                not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
-                finalized_query = self.finalize_query(
-                    prefix=prefix,
-                    query=result,
-                    functions=rendered_functions.rendered,
-                    not_supported_functions=not_supported_functions,
-                    meta_info=query_container.meta_info,
-                    source_mapping=source_mapping,
+                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                    query_container, source_mapping
                 )
                 if return_only_first_query_ctx_var.get() is True:
                     return finalized_query
                 queries_map[source_mapping.source_id] = finalized_query
+            except StrictPlatformException as err:
+                errors.append(err)
+                continue
+
         if not queries_map and errors:
             raise errors[0]
         return self.finalize(queries_map)
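
The net effect of this hunk is a template-method split: the base PlatformQueryRender keeps the loop over source mappings, the error collection, and the return_only_first_query_ctx_var short-circuit, while the per-mapping rendering moves into the new overridable hook. A minimal, self-contained sketch of that shape (BaseRender, render_all, _render_for_mapping and MappingError are illustrative stand-ins, not names from uncoder-core):

# Minimal sketch of the hook pattern introduced above, assuming invented names.
class MappingError(Exception):
    """Stand-in for StrictPlatformException."""


class BaseRender:
    def render_all(self, mappings: list[str]) -> dict[str, str]:
        queries, errors = {}, []
        for mapping in mappings:
            try:
                # Overridable per-mapping hook; failures are collected, not fatal.
                queries[mapping] = self._render_for_mapping(mapping)
            except MappingError as err:
                errors.append(err)
                continue
        if not queries and errors:
            raise errors[0]
        return queries

    def _render_for_mapping(self, mapping: str) -> str:
        return f"generic query for {mapping}"


class PlatformRender(BaseRender):
    # Platform renders override only the per-mapping step, as the FortiSIEM and
    # LogRhythm Axon renders do in the following hunks.
    def _render_for_mapping(self, mapping: str) -> str:
        return f"platform-specific query for {mapping}"


print(BaseRender().render_all(["windows", "linux"]))
print(PlatformRender().render_all(["windows"]))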

uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py
27 additions, 35 deletions
@@ -19,7 +19,6 @@
 
 from app.translator.const import DEFAULT_VALUE_TYPE
 from app.translator.core.const import TOKEN_TYPE
-from app.translator.core.context_vars import return_only_first_query_ctx_var
 from app.translator.core.custom_types.meta_info import SeverityType
 from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType
 from app.translator.core.custom_types.values import ValueType
@@ -244,40 +243,33 @@ def __replace_not_tokens(self, tokens: list[TOKEN_TYPE]) -> list[TOKEN_TYPE]:
 
         return tokens
 
-    def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
-        queries_map = {}
-        source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
-
-        for source_mapping in source_mappings:
-            is_event_type_set = False
-            field_values = [token for token in query_container.tokens if isinstance(token, FieldValue)]
-            mapped_fields_set = set()
-            for field_value in field_values:
-                mapped_fields = self.map_field(field_value.field, source_mapping)
-                mapped_fields_set = mapped_fields_set.union(set(mapped_fields))
-                if _EVENT_TYPE_FIELD in mapped_fields:
-                    is_event_type_set = True
-                    self.__update_event_type_values(field_value, source_mapping.source_id)
-
-            tokens = self.__replace_not_tokens(query_container.tokens)
-            result = self.generate_query(tokens=tokens, source_mapping=source_mapping)
-            prefix = "" if is_event_type_set else self.generate_prefix(source_mapping.log_source_signature)
-            rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
-            not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
-            finalized_query = self.finalize_query(
-                prefix=prefix,
-                query=result,
-                functions=rendered_functions.rendered,
-                not_supported_functions=not_supported_functions,
-                meta_info=query_container.meta_info,
-                source_mapping=source_mapping,
-                fields=mapped_fields_set,
-            )
-            if return_only_first_query_ctx_var.get() is True:
-                return finalized_query
-            queries_map[source_mapping.source_id] = finalized_query
-
-        return self.finalize(queries_map)
+    def _generate_from_tokenized_query_container_by_source_mapping(
+        self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
+    ) -> str:
+        is_event_type_set = False
+        field_values = [token for token in query_container.tokens if isinstance(token, FieldValue)]
+        mapped_fields_set = set()
+        for field_value in field_values:
+            mapped_fields = self.map_field(field_value.field, source_mapping)
+            mapped_fields_set = mapped_fields_set.union(set(mapped_fields))
+            if _EVENT_TYPE_FIELD in mapped_fields:
+                is_event_type_set = True
+                self.__update_event_type_values(field_value, source_mapping.source_id)
+
+        tokens = self.__replace_not_tokens(query_container.tokens)
+        result = self.generate_query(tokens=tokens, source_mapping=source_mapping)
+        prefix = "" if is_event_type_set else self.generate_prefix(source_mapping.log_source_signature)
+        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
+        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
+        return self.finalize_query(
+            prefix=prefix,
+            query=result,
+            functions=rendered_functions.rendered,
+            not_supported_functions=not_supported_functions,
+            meta_info=query_container.meta_info,
+            source_mapping=source_mapping,
+            fields=mapped_fields_set,
+        )
 
     @staticmethod
     def __update_event_type_values(field_value: FieldValue, source_id: str) -> None:
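
For context, the is_event_type_set branch above drops the log-source prefix whenever the query itself already maps some field onto the FortiSIEM event-type field, presumably so the event-type condition is not emitted twice. A compact sketch of just that decision (placeholder field names and prefix string, not real FortiSIEM values):

# Hypothetical illustration: "eventType" stands in for _EVENT_TYPE_FIELD and the
# prefix string is invented for demonstration only.
def choose_prefix(mapped_fields_per_token: list[set[str]], log_source_prefix: str) -> str:
    is_event_type_set = any("eventType" in fields for fields in mapped_fields_per_token)
    return "" if is_event_type_set else log_source_prefix

print(choose_prefix([{"eventType"}, {"user"}], "<log source prefix>"))  # -> "" (prefix skipped)
print(choose_prefix([{"user"}], "<log source prefix>"))                 # -> "<log source prefix>"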

uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py
20 additions, 28 deletions
@@ -20,7 +20,6 @@
 from typing import Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
-from app.translator.core.context_vars import return_only_first_query_ctx_var
 from app.translator.core.custom_types.tokens import LogicalOperatorType
 from app.translator.core.custom_types.values import ValueType
 from app.translator.core.exceptions.core import StrictPlatformException
@@ -242,30 +241,23 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp
 
         return super().apply_token(token, source_mapping)
 
-    def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
-        queries_map = {}
-        source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
-
-        for source_mapping in source_mappings:
-            prefix = self.generate_prefix(source_mapping.log_source_signature)
-            if "product" in query_container.meta_info.parsed_logsources:
-                prefix = f"{prefix} CONTAINS {query_container.meta_info.parsed_logsources['product'][0]}"
-            else:
-                prefix = f"{prefix} CONTAINS anything"
-
-            result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
-            rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
-            not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
-            finalized_query = self.finalize_query(
-                prefix=prefix,
-                query=result,
-                functions=rendered_functions.rendered,
-                not_supported_functions=not_supported_functions,
-                meta_info=query_container.meta_info,
-                source_mapping=source_mapping,
-            )
-            if return_only_first_query_ctx_var.get() is True:
-                return finalized_query
-            queries_map[source_mapping.source_id] = finalized_query
-
-        return self.finalize(queries_map)
+    def _generate_from_tokenized_query_container_by_source_mapping(
+        self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
+    ) -> str:
+        prefix = self.generate_prefix(source_mapping.log_source_signature)
+        if "product" in query_container.meta_info.parsed_logsources:
+            prefix = f"{prefix} CONTAINS {query_container.meta_info.parsed_logsources['product'][0]}"
+        else:
+            prefix = f"{prefix} CONTAINS anything"
+
+        result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
+        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
+        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
+        return self.finalize_query(
+            prefix=prefix,
+            query=result,
+            functions=rendered_functions.rendered,
+            not_supported_functions=not_supported_functions,
+            meta_info=query_container.meta_info,
+            source_mapping=source_mapping,
+        )
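
The prefix branch above appends a CONTAINS clause depending on whether the rule's parsed log sources declare a product. A small illustration of the resulting strings, where "<signature>" is a made-up stand-in for whatever generate_prefix returns for the mapping:

# Hypothetical illustration of the prefix construction; the signature value is invented.
def build_prefix(signature: str, parsed_logsources: dict[str, list[str]]) -> str:
    if "product" in parsed_logsources:
        return f"{signature} CONTAINS {parsed_logsources['product'][0]}"
    return f"{signature} CONTAINS anything"

print(build_prefix("<signature>", {"product": ["windows"]}))  # <signature> CONTAINS windows
print(build_prefix("<signature>", {}))                        # <signature> CONTAINS anything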

uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py
35 additions, 5 deletions
@@ -16,17 +16,19 @@
 limitations under the License.
 -----------------------------------------------------------------
 """
-
+from contextlib import suppress
 from typing import ClassVar, Optional, Union
 
 from app.translator.const import DEFAULT_VALUE_TYPE
-from app.translator.core.context_vars import preset_log_source_str_ctx_var
+from app.translator.core.context_vars import preset_log_source_str_ctx_var, return_only_first_query_ctx_var
 from app.translator.core.custom_types.tokens import OperatorType
 from app.translator.core.custom_types.values import ValueType
-from app.translator.core.mapping import SourceMapping
+from app.translator.core.exceptions.core import StrictPlatformException
+from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping
 from app.translator.core.models.field import FieldValue, Keyword
 from app.translator.core.models.identifier import Identifier
 from app.translator.core.models.platform_details import PlatformDetails
+from app.translator.core.models.query_container import TokenizedQueryContainer
 from app.translator.core.render import BaseFieldFieldRender, BaseFieldValueRender, PlatformQueryRender
 from app.translator.core.str_value_manager import StrValue
 from app.translator.managers import render_manager
@@ -71,8 +73,7 @@ def _wrap_str_value(value: str) -> str:
     def equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str:
         if isinstance(value, list):
             values = ", ".join(
-                f"{self._pre_process_value(field, str(v) if isinstance(v, int) else v, ValueType.value, True)}"
-                for v in value
+                f"{self._pre_process_value(field, str(v), value_type=ValueType.value, wrap_str=True)}" for v in value
             )
             return f"{field} in ({values})"
 
@@ -223,3 +224,32 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp
     @staticmethod
     def _finalize_search_query(query: str) -> str:
         return f"| filter {query}" if query else ""
+
+    def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
+        queries_map = {}
+        errors = []
+        source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)
+
+        last_mapping_index = len(source_mappings) - 1
+        for index, source_mapping in enumerate(source_mappings):
+            try:
+                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                    query_container, source_mapping
+                )
+                if return_only_first_query_ctx_var.get() is True:
+                    return finalized_query
+                queries_map[source_mapping.source_id] = finalized_query
+            except StrictPlatformException as err:
+                errors.append(err)
+                if index != last_mapping_index or source_mapping.source_id == DEFAULT_MAPPING_NAME or queries_map:
+                    continue
+
+                with suppress(StrictPlatformException):
+                    finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
+                        query_container, self.mappings.get_source_mapping(DEFAULT_MAPPING_NAME)
+                    )
+                    queries_map[source_mapping.source_id] = finalized_query
+
+        if not queries_map and errors:
+            raise errors[0]
+        return self.finalize(queries_map)
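
The Cortex XSIAM override layers a fallback on top of the base behaviour: when a mapping fails, nothing has succeeded yet, the failing mapping is the last one, and it is not already the default mapping, it retries once with DEFAULT_MAPPING_NAME and suppresses a second StrictPlatformException. A self-contained sketch of just that control flow (hypothetical names; the render and mapping machinery is reduced to a plain callable):

from contextlib import suppress

# Illustrative stand-ins: render_one plays the role of
# _generate_from_tokenized_query_container_by_source_mapping, "default" of DEFAULT_MAPPING_NAME.
class MappingError(Exception):
    pass


def render_with_fallback(mappings: list[str], render_one) -> dict[str, str]:
    queries, errors = {}, []
    last_index = len(mappings) - 1
    for index, mapping in enumerate(mappings):
        try:
            queries[mapping] = render_one(mapping)
        except MappingError as err:
            errors.append(err)
            # Only fall back when this was the last mapping, nothing succeeded,
            # and the failing mapping was not already the default one.
            if index != last_index or mapping == "default" or queries:
                continue
            with suppress(MappingError):
                queries[mapping] = render_one("default")
    if not queries and errors:
        raise errors[0]
    return queries


def render_one(mapping: str) -> str:
    if mapping == "default":
        return "default query"
    raise MappingError(f"no fields for {mapping}")


print(render_with_fallback(["windows", "linux"], render_one))  # {'linux': 'default query'}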
