
Commit 57fe244

Author: Oleksandr Volha (committed)
fix chronicle rule tokenization
1 parent 8bc0519 commit 57fe244

File tree

1 file changed (+12 −17 lines)


translator/app/translator/platforms/chronicle/tokenizer.py

Lines changed: 12 additions & 17 deletions
@@ -83,26 +83,15 @@ class ChronicleRuleTokenizer(ChronicleQueryTokenizer):
     back_quotes_value_pattern = (
         rf"`(?P<{ValueType.back_quotes_value}>(?:[:a-zA-Z\*0-9=+%#\-_/,\'\"\\\.$&^@!\(\)\{{\}}\s])*)`"
     )
-    regex_value_regex = rf"{double_quotes_value_pattern}|{back_quotes_value_pattern}\s*\)\s*(?:nocase)?\s*"
+    regex_value_pattern = rf"(?:{double_quotes_value_pattern}|{back_quotes_value_pattern})\s*\)\s*(?:nocase)?\s*"
+    regex_field_value_pattern = rf"{regex_field_regex}\s*{regex_value_pattern}"
 
     def search_field_value(self, query: str) -> tuple[FieldValue, str]:
-        if query.startswith("re.regex("):
-            field_search = re.search(self.regex_field_regex, query)
-            if field_search is None:
-                raise TokenizerGeneralException(error=f"Field couldn't be found in query part: {query}")
-
-            field = field_search.group("field")
-            pos = field_search.end()
-            query = query[pos:]
-
-            value_search = re.search(self.regex_value_regex, query)
-            if value_search is None:
-                raise TokenizerGeneralException(error=f"Value couldn't be found in query part: {query}")
-
-            operator = OperatorType.REGEX
-            operator, value = self.get_operator_and_value(value_search, operator)
+        if regex_field_value_search := re.match(self.regex_field_value_pattern, query):
+            field = regex_field_value_search.group("field")
+            operator, value = self.get_operator_and_value(regex_field_value_search, operator=OperatorType.REGEX)
             operator, value = self.process_value_wildcards(value=value, operator=OperatorType.REGEX)
-            pos = value_search.end()
+            pos = regex_field_value_search.end()
             query = query[pos:]
 
             operator_token = Identifier(token_type=operator)
@@ -119,3 +108,9 @@ def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.E
             return operator, self.escape_manager.remove_escape(b_q_value)
 
         return super().get_operator_and_value(match, operator)
+
+    def _check_field_value_match(self, query: str, white_space_pattern: str = r"\s+") -> bool:
+        if re.match(self.regex_field_value_pattern, query, re.IGNORECASE):
+            return True
+
+        return super()._check_field_value_match(query, white_space_pattern)
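
The gist of the change: in the old regex_value_regex the alternation was not grouped, so the trailing \)\s*(?:nocase)? appeared to bind only to the back-quoted alternative; the new regex_value_pattern wraps both value forms in a non-capturing group, and regex_field_value_pattern lets a single re.match pick up field and value together. Below is a minimal, hypothetical sketch of that composition; the patterns, group names, and sample query are simplified stand-ins, not the tokenizer's real regex_field_regex or ValueType-based groups.

import re

# Simplified stand-ins for the class attributes touched by this commit
# (assumed shapes, for illustration only).
regex_field_regex = r"re\.regex\(\s*(?P<field>[a-zA-Z0-9._]+)\s*,"
double_quotes_value_pattern = r"\"(?P<d_q_value>[^\"]*)\""
back_quotes_value_pattern = r"`(?P<b_q_value>[^`]*)`"

# Non-capturing group so the closing parenthesis and optional `nocase`
# apply to both the double-quoted and the back-quoted value form.
regex_value_pattern = rf"(?:{double_quotes_value_pattern}|{back_quotes_value_pattern})\s*\)\s*(?:nocase)?\s*"
regex_field_value_pattern = rf"{regex_field_regex}\s*{regex_value_pattern}"

query = 're.regex(target.user.userid, `admin.*`) nocase and metadata.event_type = "USER_LOGIN"'

# One anchored match yields the field, the value, and the position where
# the rest of the query begins.
if match := re.match(regex_field_value_pattern, query):
    print(match.group("field"))      # target.user.userid
    print(match.group("b_q_value"))  # admin.*
    print(query[match.end():])       # and metadata.event_type = "USER_LOGIN"

Matching with re.match instead of two separate re.search calls also keeps the token anchored at the start of the query part, which is what the new _check_field_value_match override relies on.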
