
TDM-9415 fixes · UncoderIO/Uncoder_IO@0766f1c · GitHub

Commit 0766f1c

TDM-9415 fixes
1 parent 3cc30bb commit 0766f1c

File tree

7 files changed: +49 / -17 lines changed

uncoder-core/app/translator/platforms/base/aql/parsers/aql.py

Lines changed: 8 additions & 4 deletions

@@ -17,7 +17,7 @@
 """
 
 import re
-from typing import Union
+from typing import Optional, Union
 
 from app.translator.core.exceptions.parser import TokenizerGeneralException
 from app.translator.core.models.functions.base import ParsedFunctions
@@ -105,8 +105,8 @@ def __parse_log_sources(self, query: str) -> tuple[dict[str, Union[list[str], li
 
         return log_sources, query
 
-    def _parse_query(self, text: str) -> tuple[str, dict[str, Union[list[str], list[int]]], ParsedFunctions]:
-        query = self.__clean_query(text)
+    def _parse_query(self, query: str) -> tuple[str, dict[str, Union[list[str], list[int]]], Optional[ParsedFunctions]]:
+        query = self.__clean_query(query)
         self.__check_table(query)
         query, functions = self.platform_functions.parse(query)
         log_sources, query = self.__parse_log_sources(query)
@@ -118,7 +118,11 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain
         query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
             query_tokens, functions.functions
         )
-        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens + function_field_tokens,
+            log_sources=log_sources,
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.function_fields = function_field_tokens
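Note on the changed call: every parser touched by this commit now passes get_source_mappings keyword arguments, including alt_mapping taken from raw_query_container.meta_info.source_alt_mapping. The base class itself is not part of this diff, so the sketch below only suggests what the updated signature on PlatformQueryParser might look like; the parameter types and default are assumptions, not project code.

# Minimal sketch, assuming alt_mapping is an optional selector used when
# resolving source mappings. The real method lives in the project's core
# parser module and may differ in names, types, and defaults.
from typing import Optional, Union


class PlatformQueryParserSketch:
    def get_source_mappings(
        self,
        field_tokens: list,
        log_sources: dict[str, Union[list[str], list[int]]],
        alt_mapping: Optional[str] = None,
    ) -> list:
        # Resolve source mappings from the query's field names and extracted
        # log sources, preferring the alternative mapping when one is given.
        raise NotImplementedError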

uncoder-core/app/translator/platforms/base/lucene/parsers/lucene.py

Lines changed: 10 additions & 4 deletions

@@ -17,7 +17,9 @@
 """
 
 import re
+from typing import Optional, Union
 
+from app.translator.core.models.functions.base import ParsedFunctions
 from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer
 from app.translator.core.parser import PlatformQueryParser
 from app.translator.platforms.base.lucene.tokenizer import LuceneTokenizer
@@ -31,7 +33,7 @@ class LuceneQueryParser(PlatformQueryParser):
 
     wrapped_with_comment_pattern = r"^\s*//.*(?:\n|$)"
 
-    def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
+    def _parse_query(self, query: str) -> tuple[str, dict[str, Union[list[str], list[int]]], Optional[ParsedFunctions]]:
         log_sources = {}
         for source_type in self.log_source_key_types:
             pattern = self.log_source_pattern.replace("___source_type___", source_type)
@@ -43,13 +45,17 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
             pos_end = search.end()
             query = query[:pos_start] + query[pos_end:]
 
-        return query, log_sources
+        return query, log_sources, None
 
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
-        query, log_sources = self._parse_query(raw_query_container.query)
+        query, log_sources, _ = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
         query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens,
+            log_sources=log_sources,
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
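With this change, _parse_query has the same return shape in the AQL, Lucene, and SQL parsers: (query, log_sources, Optional[ParsedFunctions]). Parsers that do not support platform functions return None in the third position, and their parse methods discard it with an underscore. Below is a small self-contained illustration of that contract; the ParsedFunctions class and the example log source are stand-ins, not project code.

# Illustration of the unified _parse_query return contract only.
# ParsedFunctions here is a stand-in for the project's ParsedFunctions model.
from dataclasses import dataclass, field
from typing import Optional, Union


@dataclass
class ParsedFunctions:
    functions: list = field(default_factory=list)


def parse_without_functions(query: str) -> tuple[str, dict[str, Union[list[str], list[int]]], Optional[ParsedFunctions]]:
    # Lucene/SQL-style parsers extract log sources only, so the functions slot is None.
    log_sources = {"index": ["winlogbeat-*"]}  # hypothetical extracted log source
    return query, log_sources, None


query, log_sources, _ = parse_without_functions("event.code:4624")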

uncoder-core/app/translator/platforms/base/spl/parsers/spl.py

Lines changed: 5 additions & 1 deletion

@@ -75,7 +75,11 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain
         query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
             query_tokens, functions.functions
         )
-        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens + function_field_tokens,
+            log_sources=log_sources,
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.function_fields = function_field_tokens

uncoder-core/app/translator/platforms/base/sql/parsers/sql.py

Lines changed: 11 additions & 5 deletions

@@ -17,7 +17,9 @@
 """
 
 import re
+from typing import Optional, Union
 
+from app.translator.core.models.functions.base import ParsedFunctions
 from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer
 from app.translator.core.parser import PlatformQueryParser
 from app.translator.platforms.base.sql.tokenizer import SqlTokenizer
@@ -30,21 +32,25 @@ class SqlQueryParser(PlatformQueryParser):
 
     wrapped_with_comment_pattern = r"^\s*--.*(?:\n|$)"
 
-    def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
+    def _parse_query(self, query: str) -> tuple[str, dict[str, Union[list[str], list[int]]], Optional[ParsedFunctions]]:
         log_source = {"table": []}
         if re.search(self.query_delimiter_pattern, query, flags=re.IGNORECASE):
             table_search = re.search(self.table_pattern, query)
             table = table_search.group("table")
             log_source["table"] = [table]
-            return re.split(self.query_delimiter_pattern, query, flags=re.IGNORECASE)[1], log_source
+            return re.split(self.query_delimiter_pattern, query, flags=re.IGNORECASE)[1], log_source, None
 
-        return query, log_source
+        return query, log_source, None
 
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
-        query, log_sources = self._parse_query(raw_query_container.query)
+        query, log_sources, _ = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
         query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens,
+            log_sources=log_sources,
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
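The SQL parser's table extraction shown above relies on query_delimiter_pattern and table_pattern, which are defined elsewhere in SqlQueryParser and are not part of this diff. The snippet below re-creates the same mechanics with stand-in patterns purely to show how the table name and the condition part are separated; the real patterns may differ.

# Stand-in patterns for illustration only; the real ones belong to SqlQueryParser.
import re
from typing import Optional

QUERY_DELIMITER_PATTERN = r"\bWHERE\b"       # hypothetical
TABLE_PATTERN = r"\bFROM\s+(?P<table>\S+)"   # hypothetical


def parse_sql_log_source(query: str) -> tuple[str, dict[str, list[str]], Optional[object]]:
    log_source = {"table": []}
    if re.search(QUERY_DELIMITER_PATTERN, query, flags=re.IGNORECASE):
        table_search = re.search(TABLE_PATTERN, query)
        log_source["table"] = [table_search.group("table")]
        # Keep only the condition part after the delimiter for tokenization.
        return re.split(QUERY_DELIMITER_PATTERN, query, flags=re.IGNORECASE)[1], log_source, None
    return query, log_source, None


print(parse_sql_log_source("SELECT * FROM logs WHERE EventID = 4624"))
# -> (' EventID = 4624', {'table': ['logs']}, None)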

uncoder-core/app/translator/platforms/chronicle/parsers/chronicle.py

Lines changed: 5 additions & 1 deletion

@@ -36,7 +36,11 @@ class ChronicleQueryParser(PlatformQueryParser):
     def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
         query_tokens = self.get_query_tokens(raw_query_container.query)
         query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(query_field_tokens, {})
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens,
+            log_sources={},
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]

uncoder-core/app/translator/platforms/elasticsearch/parsers/elasticsearch_eql.py

Lines changed: 5 additions & 1 deletion

@@ -30,7 +30,11 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain
         query, log_sources = self._parse_query(raw_query_container.query)
         query_tokens = self.get_query_tokens(query)
         query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
-        source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens,
+            log_sources=log_sources,
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]

uncoder-core/app/translator/platforms/logscale/parsers/logscale.py

Lines changed: 5 additions & 1 deletion

@@ -46,7 +46,11 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain
         query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
             query_tokens, functions.functions
         )
-        source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, {})
+        source_mappings = self.get_source_mappings(
+            field_tokens=query_field_tokens + function_field_tokens,
+            log_sources={},
+            alt_mapping=raw_query_container.meta_info.source_alt_mapping,
+        )
         meta_info = raw_query_container.meta_info
         meta_info.query_fields = query_field_tokens
         meta_info.function_fields = function_field_tokens
