4 changes: 2 additions & 2 deletions translator/app/translator/core/functions.py
@@ -30,7 +30,7 @@
from settings import INIT_FUNCTIONS

if TYPE_CHECKING:
from app.translator.core.render import BaseQueryRender
from app.translator.core.render import PlatformQueryRender


class FunctionParser(ABC):
@@ -72,7 +72,7 @@ def __init__(self):
self._names_map: dict[str, str] = {}

@abstractmethod
def init_search_func_render(self, platform_render: BaseQueryRender) -> None:
def init_search_func_render(self, platform_render: PlatformQueryRender) -> None:
raise NotImplementedError

@cached_property
translator/app/translator/core/models/query_container.py
@@ -7,6 +7,7 @@
from app.translator.core.mapping import DEFAULT_MAPPING_NAME
from app.translator.core.models.field import Field
from app.translator.core.models.functions.base import ParsedFunctions
from app.translator.core.tokenizer import TOKEN_TYPE


class MetaInfoContainer:
@@ -27,15 +28,15 @@ def __init__(
status: Optional[str] = None,
false_positives: Optional[list[str]] = None,
source_mapping_ids: Optional[list[str]] = None,
parsed_logsources: Optional[dict] = None
parsed_logsources: Optional[dict] = None,
) -> None:
self.id = id_ or str(uuid.uuid4())
self.title = title or ""
self.description = description or ""
self.author = author or ""
self.date = date or datetime.now().date().strftime("%Y-%m-%d")
self.license = license_ or "DRL 1.1"
self.fields = fields or []
self.license = license_ or "DRL 1.1"
self.severity = severity or SeverityType.low
self.references = references or []
self.tags = tags or []
@@ -47,7 +48,14 @@


@dataclass
class SiemContainer:
query: list
class RawQueryContainer:
query: str
language: str
meta_info: MetaInfoContainer = field(default_factory=MetaInfoContainer)


@dataclass
class TokenizedQueryContainer:
tokens: list[TOKEN_TYPE]
meta_info: MetaInfoContainer
functions: ParsedFunctions = field(default_factory=ParsedFunctions)
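
Note: the old SiemContainer is split into two containers — RawQueryContainer carries the untouched query text plus its language, while TokenizedQueryContainer carries the token list, parsed functions, and the shared MetaInfoContainer. A minimal sketch of how they relate; the field values below are invented for illustration:

```python
from app.translator.core.models.query_container import (
    MetaInfoContainer,
    RawQueryContainer,
    TokenizedQueryContainer,
)

# Stage 1: the query as received, before any tokenization.
raw = RawQueryContainer(query="src_ip='10.0.0.1'", language="splunk")

# Stage 2: after tokenization the same meta info travels with the token list.
meta = MetaInfoContainer(title="Suspicious source IP", source_mapping_ids=["default"])
tokenized = TokenizedQueryContainer(tokens=[], meta_info=meta)
```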
24 changes: 13 additions & 11 deletions translator/app/translator/core/parser.py
@@ -17,33 +17,35 @@
"""

from abc import ABC, abstractmethod
from typing import Union

from app.translator.core.exceptions.parser import TokenizerGeneralException
from app.translator.core.functions import PlatformFunctions
from app.translator.core.mapping import BasePlatformMappings, SourceMapping
from app.translator.core.models.field import FieldValue
from app.translator.core.models.functions.base import ParsedFunctions
from app.translator.core.models.parser_output import MetaInfoContainer, SiemContainer
from app.translator.core.models.platform_details import PlatformDetails
from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer
from app.translator.core.tokenizer import TOKEN_TYPE, QueryTokenizer


class Parser(ABC):
mappings: BasePlatformMappings = None
tokenizer: QueryTokenizer = None
details: PlatformDetails = None
platform_functions: PlatformFunctions = None
class QueryParser(ABC):
def parse_raw_query(self, text: str, language: str) -> RawQueryContainer:
return RawQueryContainer(query=text, language=language)

@abstractmethod
def _get_meta_info(self, *args, **kwargs) -> MetaInfoContainer:
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
raise NotImplementedError("Abstract method")

@abstractmethod
def parse(self, text: str) -> SiemContainer:
raise NotImplementedError("Abstract method")

class PlatformQueryParser(QueryParser, ABC):
mappings: BasePlatformMappings = None
tokenizer: QueryTokenizer = None
details: PlatformDetails = None
platform_functions: PlatformFunctions = None

def get_tokens_and_source_mappings(
self, query: str, log_sources: dict[str, list[str]]
self, query: str, log_sources: dict[str, Union[str, list[str]]]
) -> tuple[list[TOKEN_TYPE], list[SourceMapping]]:
if not query:
raise TokenizerGeneralException("Can't translate empty query. Please provide more details")
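
Note: QueryParser now defines a uniform two-step entry point — build a RawQueryContainer, then tokenize it — while PlatformQueryParser keeps the mapping/tokenizer plumbing. A rough sketch of the intended call sequence, using SplunkQueryParser purely as an example subclass (the query text and language value are assumptions):

```python
from app.translator.platforms.splunk.parsers.splunk import SplunkQueryParser

parser = SplunkQueryParser()

# Step 1: wrap the incoming text and its language.
raw = parser.parse_raw_query('index=main EventCode=4688', language="splunk")

# Step 2: tokenize; the result is a TokenizedQueryContainer with tokens,
# parsed functions and the meta info carried over from the raw container.
tokenized = parser.parse(raw)
```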
43 changes: 31 additions & 12 deletions translator/app/translator/core/render.py
@@ -16,7 +16,8 @@
limitations under the License.
-----------------------------------------------------------------
"""
from abc import ABC

from abc import ABC, abstractmethod
from collections.abc import Callable
from typing import Optional, Union

@@ -29,11 +30,12 @@
from app.translator.core.functions import PlatformFunctions
from app.translator.core.mapping import DEFAULT_MAPPING_NAME, BasePlatformMappings, LogSourceSignature, SourceMapping
from app.translator.core.models.field import Field, FieldValue, Keyword
from app.translator.core.models.functions.base import Function, ParsedFunctions
from app.translator.core.models.functions.base import Function
from app.translator.core.models.identifier import Identifier
from app.translator.core.models.parser_output import MetaInfoContainer
from app.translator.core.models.platform_details import PlatformDetails
from app.translator.core.models.query_container import MetaInfoContainer, RawQueryContainer, TokenizedQueryContainer
from app.translator.core.str_value_manager import StrValueManager
from app.translator.core.tokenizer import TOKEN_TYPE


class BaseQueryFieldValue(ABC):
@@ -99,7 +101,13 @@ def apply_field_value(self, field: str, operator: Identifier, value: DEFAULT_VAL
raise UnsupportedOperatorException(operator.token_type)


class BaseQueryRender:
class QueryRender(ABC):
@abstractmethod
def generate(self, query_container: Union[RawQueryContainer, TokenizedQueryContainer]) -> str:
raise NotImplementedError("Abstract method")


class PlatformQueryRender(QueryRender):
mappings: BasePlatformMappings = None
details: PlatformDetails = None
is_strict_mapping = False
@@ -168,9 +176,9 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp

return token.token_type

def generate_query(self, query: list[Union[FieldValue, Keyword, Identifier]], source_mapping: SourceMapping) -> str:
def generate_query(self, tokens: list[TOKEN_TYPE], source_mapping: SourceMapping) -> str:
result_values = []
for token in query:
for token in tokens:
result_values.append(self.apply_token(token=token, source_mapping=source_mapping))
return "".join(result_values)

@@ -243,22 +251,33 @@ def _get_source_mappings(self, source_mapping_ids: list[str]) -> list[SourceMapp

return source_mappings

def generate(self, query: list, meta_info: MetaInfoContainer, functions: ParsedFunctions) -> str:
def _generate_from_raw_query_container(self, query_container: RawQueryContainer) -> str:
return self.finalize_query(
prefix="", query=query_container.query, functions="", meta_info=query_container.meta_info
)

def _generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
queries_map = {}
source_mappings = self._get_source_mappings(meta_info.source_mapping_ids)
source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)

for source_mapping in source_mappings:
prefix = self.generate_prefix(source_mapping.log_source_signature)
result = self.generate_query(query=query, source_mapping=source_mapping)
result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)

finalized_query = self.finalize_query(
prefix=prefix,
query=result,
functions=self.generate_functions(functions.functions, source_mapping),
not_supported_functions=functions.not_supported,
meta_info=meta_info,
functions=self.generate_functions(query_container.functions.functions, source_mapping),
not_supported_functions=query_container.functions.not_supported,
meta_info=query_container.meta_info,
source_mapping=source_mapping,
)
queries_map[source_mapping.source_id] = finalized_query

return self.finalize(queries_map)

def generate(self, query_container: Union[RawQueryContainer, TokenizedQueryContainer]) -> str:
if isinstance(query_container, RawQueryContainer):
return self._generate_from_raw_query_container(query_container)

return self._generate_from_tokenized_query_container(query_container)
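
Note: generate() becomes the single render entry point for both container types — a RawQueryContainer is passed through finalize_query without token translation, while a TokenizedQueryContainer is rendered per source mapping. A sketch of the caller side; the platform choice and query text are illustrative only:

```python
from app.translator.platforms.splunk.parsers.splunk import SplunkQueryParser
from app.translator.platforms.microsoft.renders.microsoft_sentinel import MicrosoftSentinelQueryRender

parser = SplunkQueryParser()
render = MicrosoftSentinelQueryRender()

raw = parser.parse_raw_query('index=main EventCode=4688', language="splunk")

# Raw container: the query text is finalized as-is, without token translation.
print(render.generate(raw))

# Tokenized container: tokens are mapped and rendered per source mapping.
print(render.generate(parser.parse(raw)))
```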
1 change: 0 additions & 1 deletion translator/app/translator/core/render_cti.py
@@ -17,7 +17,6 @@
-----------------------------------------------------------------
"""


from app.translator.core.models.iocs import IocsChunkValue


50 changes: 25 additions & 25 deletions translator/app/translator/platforms/__init__.py
@@ -1,17 +1,17 @@
from app.translator.platforms.athena.parsers.athena import AthenaParser
from app.translator.platforms.athena.parsers.athena import AthenaQueryParser
from app.translator.platforms.athena.renders.athena import AthenaQueryRender
from app.translator.platforms.athena.renders.athena_cti import AthenaCTI
from app.translator.platforms.carbonblack.renders.carbonblack_cti import CarbonBlackCTI
from app.translator.platforms.chronicle.parsers.chronicle import ChronicleParser
from app.translator.platforms.chronicle.parsers.chronicle import ChronicleQueryParser
from app.translator.platforms.chronicle.parsers.chronicle_rule import ChronicleRuleParser
from app.translator.platforms.chronicle.renders.chronicle import ChronicleQueryRender
from app.translator.platforms.chronicle.renders.chronicle_cti import ChronicleQueryCTI
from app.translator.platforms.chronicle.renders.chronicle_rule import ChronicleSecurityRuleRender
from app.translator.platforms.crowdstrike.parsers.crowdstrike import CrowdStrikeParser
from app.translator.platforms.crowdstrike.parsers.crowdstrike import CrowdStrikeQueryParser
from app.translator.platforms.crowdstrike.renders.crowdstrike import CrowdStrikeQueryRender
from app.translator.platforms.crowdstrike.renders.crowdstrike_cti import CrowdStrikeCTI
from app.translator.platforms.elasticsearch.parsers.detection_rule import ElasticSearchRuleParser
from app.translator.platforms.elasticsearch.parsers.elasticsearch import ElasticSearchParser
from app.translator.platforms.elasticsearch.parsers.elasticsearch import ElasticSearchQueryParser
from app.translator.platforms.elasticsearch.renders.detection_rule import ElasticSearchRuleRender
from app.translator.platforms.elasticsearch.renders.elast_alert import ElastAlertRuleRender
from app.translator.platforms.elasticsearch.renders.elasticsearch import ElasticSearchQueryRender
@@ -20,30 +20,30 @@
from app.translator.platforms.elasticsearch.renders.xpack_watcher import XPackWatcherRuleRender
from app.translator.platforms.fireeye_helix.renders.fireeye_helix_cti import FireeyeHelixCTI
from app.translator.platforms.forti_siem.renders.forti_siem_rule import FortiSiemRuleRender
from app.translator.platforms.graylog.parsers.graylog import GraylogParser
from app.translator.platforms.graylog.renders.graylog import GraylogRender
from app.translator.platforms.graylog.parsers.graylog import GraylogQueryParser
from app.translator.platforms.graylog.renders.graylog import GraylogQueryRender
from app.translator.platforms.graylog.renders.graylog_cti import GraylogCTI
from app.translator.platforms.logpoint.renders.logpoint_cti import LogpointCTI
from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_query import LogRhythmAxonQueryRender
from app.translator.platforms.logrhythm_axon.renders.logrhythm_axon_rule import LogRhythmAxonRuleRender
from app.translator.platforms.logscale.parsers.logscale import LogScaleParser
from app.translator.platforms.logscale.parsers.logscale import LogScaleQueryParser
from app.translator.platforms.logscale.parsers.logscale_alert import LogScaleAlertParser
from app.translator.platforms.logscale.renders.logscale_cti import LogScaleCTI
from app.translator.platforms.logscale.renders.logscale import LogScaleQueryRender
from app.translator.platforms.logscale.renders.logscale_alert import LogScaleAlertRender
from app.translator.platforms.logscale.renders.logscale_cti import LogScaleCTI
from app.translator.platforms.microsoft.parsers.microsoft_defender import MicrosoftDefenderQueryParser
from app.translator.platforms.microsoft.parsers.microsoft_sentinel import MicrosoftParser
from app.translator.platforms.microsoft.parsers.microsoft_sentinel_rule import MicrosoftRuleParser
from app.translator.platforms.microsoft.parsers.microsoft_sentinel import MicrosoftSentinelQueryParser
from app.translator.platforms.microsoft.parsers.microsoft_sentinel_rule import MicrosoftSentinelRuleParser
from app.translator.platforms.microsoft.renders.microsoft_defender import MicrosoftDefenderQueryRender
from app.translator.platforms.microsoft.renders.microsoft_defender_cti import MicrosoftDefenderCTI
from app.translator.platforms.microsoft.renders.microsoft_sentinel import MicrosoftSentinelQueryRender
from app.translator.platforms.microsoft.renders.microsoft_sentinel_cti import MicrosoftSentinelCTI
from app.translator.platforms.microsoft.renders.microsoft_sentinel_rule import MicrosoftSentinelRuleRender
from app.translator.platforms.opensearch.parsers.opensearch import OpenSearchParser
from app.translator.platforms.opensearch.parsers.opensearch import OpenSearchQueryParser
from app.translator.platforms.opensearch.renders.opensearch import OpenSearchQueryRender
from app.translator.platforms.opensearch.renders.opensearch_cti import OpenSearchCTI
from app.translator.platforms.opensearch.renders.opensearch_rule import OpenSearchRuleRender
from app.translator.platforms.qradar.parsers.qradar import QradarParser
from app.translator.platforms.qradar.parsers.qradar import QradarQueryParser
from app.translator.platforms.qradar.renders.qradar import QradarQueryRender
from app.translator.platforms.qradar.renders.qradar_cti import QRadarCTI
from app.translator.platforms.qualys.renders.qualys_cti import QualysCTI
@@ -53,7 +53,7 @@
from app.translator.platforms.sigma.parsers.sigma import SigmaParser
from app.translator.platforms.sigma.renders.sigma import SigmaRender
from app.translator.platforms.snowflake.renders.snowflake_cti import SnowflakeCTI
from app.translator.platforms.splunk.parsers.splunk import SplunkParser
from app.translator.platforms.splunk.parsers.splunk import SplunkQueryParser
from app.translator.platforms.splunk.parsers.splunk_alert import SplunkAlertParser
from app.translator.platforms.splunk.renders.splunk import SplunkQueryRender
from app.translator.platforms.splunk.renders.splunk_alert import SplunkAlertRender
@@ -83,28 +83,28 @@
XPackWatcherRuleRender(),
OpenSearchQueryRender(),
OpenSearchRuleRender(),
GraylogRender(),
GraylogQueryRender(),
FortiSiemRuleRender(),
)

__ALL_PARSERS = (
AthenaParser(),
ChronicleParser(),
AthenaQueryParser(),
ChronicleQueryParser(),
ChronicleRuleParser(),
SplunkParser(),
SplunkQueryParser(),
SplunkAlertParser(),
SigmaParser(),
QradarParser(),
MicrosoftParser(),
MicrosoftRuleParser(),
QradarQueryParser(),
MicrosoftSentinelQueryParser(),
MicrosoftSentinelRuleParser(),
MicrosoftDefenderQueryParser(),
CrowdStrikeParser(),
LogScaleParser(),
CrowdStrikeQueryParser(),
LogScaleQueryParser(),
LogScaleAlertParser(),
ElasticSearchParser(),
ElasticSearchQueryParser(),
ElasticSearchRuleParser(),
OpenSearchParser(),
GraylogParser(),
OpenSearchQueryParser(),
GraylogQueryParser(),
)


31 changes: 13 additions & 18 deletions translator/app/translator/platforms/athena/parsers/athena.py
@@ -19,39 +19,34 @@
import re
from typing import Optional

from app.translator.core.models.parser_output import MetaInfoContainer, SiemContainer
from app.translator.core.models.platform_details import PlatformDetails
from app.translator.core.parser import Parser
from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer
from app.translator.core.parser import PlatformQueryParser
from app.translator.platforms.athena.const import athena_details
from app.translator.platforms.athena.mapping import AthenaMappings, athena_mappings
from app.translator.platforms.athena.tokenizer import AthenaTokenizer


class AthenaParser(Parser):
class AthenaQueryParser(PlatformQueryParser):
details: PlatformDetails = athena_details
mappings: AthenaMappings = athena_mappings
tokenizer = AthenaTokenizer()
query_delimiter_pattern = r"\sFROM\s\S*\sWHERE\s"
table_pattern = r"\sFROM\s(?P<table>[a-zA-Z\.\-\*]+)\sWHERE\s"

@staticmethod
def _get_meta_info(source_mapping_ids: list[str]) -> MetaInfoContainer:
return MetaInfoContainer(source_mapping_ids=source_mapping_ids)

def _parse_query(self, text: str) -> tuple[str, dict[str, Optional[str]]]:
def _parse_query(self, query: str) -> tuple[str, dict[str, Optional[str]]]:
log_source = {"table": None}
if re.search(self.query_delimiter_pattern, text, flags=re.IGNORECASE):
table_search = re.search(self.table_pattern, text)
if re.search(self.query_delimiter_pattern, query, flags=re.IGNORECASE):
table_search = re.search(self.table_pattern, query)
table = table_search.group("table")
log_source["table"] = table
return re.split(self.query_delimiter_pattern, text, flags=re.IGNORECASE)[1], log_source
return re.split(self.query_delimiter_pattern, query, flags=re.IGNORECASE)[1], log_source

return text, log_source
return query, log_source

def parse(self, text: str) -> SiemContainer:
query, log_sources = self._parse_query(text)
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
query, log_sources = self._parse_query(raw_query_container.query)
tokens, source_mappings = self.get_tokens_and_source_mappings(query, log_sources)
return SiemContainer(
query=tokens,
meta_info=self._get_meta_info([source_mapping.source_id for source_mapping in source_mappings]),
)
meta_info = raw_query_container.meta_info
meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
return TokenizedQueryContainer(tokens=tokens, meta_info=meta_info)
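
Note: _parse_query splits a full SQL statement on the FROM … WHERE delimiter and keeps the table name as the log source. A small illustration — the statement is invented, and calling the private helper directly is only to show the split:

```python
from app.translator.platforms.athena.parsers.athena import AthenaQueryParser

parser = AthenaQueryParser()

# The table name must match the [a-zA-Z.\-*]+ table_pattern shown above.
where_clause, log_source = parser._parse_query(
    "SELECT * FROM eventlog WHERE process_name = 'powershell.exe'"
)
# where_clause -> "process_name = 'powershell.exe'"
# log_source   -> {"table": "eventlog"}
```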
5 changes: 3 additions & 2 deletions translator/app/translator/platforms/athena/renders/athena.py
@@ -16,13 +16,14 @@
limitations under the License.
-----------------------------------------------------------------
"""

from typing import Union

from app.translator.const import DEFAULT_VALUE_TYPE
from app.translator.core.exceptions.render import UnsupportedRenderMethod
from app.translator.core.mapping import LogSourceSignature
from app.translator.core.models.platform_details import PlatformDetails
from app.translator.core.render import BaseQueryFieldValue, BaseQueryRender
from app.translator.core.render import BaseQueryFieldValue, PlatformQueryRender
from app.translator.platforms.athena.const import athena_details
from app.translator.platforms.athena.mapping import AthenaMappings, athena_mappings

@@ -76,7 +77,7 @@ def keywords(self, field: str, value: DEFAULT_VALUE_TYPE) -> str:  # noqa: ARG00
raise UnsupportedRenderMethod(platform_name=self.details.name, method="Keywords")


class AthenaQueryRender(BaseQueryRender):
class AthenaQueryRender(PlatformQueryRender):
details: PlatformDetails = athena_details
mappings: AthenaMappings = athena_mappings
