2727 TokenizerGeneralException ,
2828 QueryParenthesesException
2929)
30- from app .translator .core .mapping import SourceMapping , DEFAULT_MAPPING_NAME , BasePlatformMappings
31- from app .translator .core .models .field import Field , Keyword
30+ from app .translator .core .mapping import SourceMapping
31+ from app .translator .core .models .field import Field , FieldValue , Keyword
3232from app .translator .core .models .functions .base import Function
3333from app .translator .core .models .functions .sort import SortArg
3434from app .translator .core .models .identifier import Identifier
3535from app .translator .core .custom_types .tokens import OperatorType , GroupType
3636from app .translator .tools .utils import get_match_group
3737
# Union of the token kinds a tokenizer emits: parsed field/value pairs,
# bare keywords, and logical/grouping identifiers.
TOKEN_TYPE = Union[FieldValue, Keyword, Identifier]
3939
4040
class BaseTokenizer(ABC):
    """Abstract interface every platform tokenizer must implement.

    A tokenizer converts a raw query string into a flat list of tokens
    (field/value pairs, keywords, and logical/group identifiers).
    """

    @abstractmethod
    def tokenize(self, query: str) -> List[Union[FieldValue, Keyword, Identifier]]:
        """Split *query* into a list of tokens; implemented by subclasses."""
        raise NotImplementedError()
4545
4646
@@ -180,18 +180,18 @@ def process_value_wildcard_symbols(self,
180180 return self ._clean_value (value , wildcard_symbol ), op
181181
182182 @staticmethod
183- def create_field (field_name : str , operator : Identifier , value : Union [str , List ]) -> Field :
184- return Field ( operator = operator , value = value , source_name = field_name )
183+ def create_field_value (field_name : str , operator : Identifier , value : Union [str , List ]) -> FieldValue :
184+ return FieldValue ( source_name = field_name , operator = operator , value = value )
185185
186- def search_field_value (self , query ):
186+ def search_field_value (self , query ) -> Tuple [ FieldValue , str ] :
187187 field_name = self .search_field (query )
188188 operator = self .search_operator (query , field_name )
189189 query , operator , value = self .search_value (query = query , operator = operator , field_name = field_name )
190190 value , operator_token = self .process_value_wildcard_symbols (value = value ,
191191 operator = operator ,
192192 wildcard_symbol = self .wildcard_symbol )
193- field = self .create_field (field_name = field_name , operator = operator_token , value = value )
194- return field , query
193+ field_value = self .create_field_value (field_name = field_name , operator = operator_token , value = value )
194+ return field_value , query
195195
196196 def _match_field_value (self , query : str , white_space_pattern : str = r"\s+" ) -> bool :
197197 single_value_operator_group = fr"(?:{ '|' .join (self .single_value_operators_map )} )"
@@ -208,7 +208,7 @@ def _match_field_value(self, query: str, white_space_pattern: str = r"\s+") -> b
208208
209209 return False
210210
211- def _get_identifier (self , query : str ) -> Tuple [Union [Field , Keyword , Identifier ], str ]:
211+ def _get_identifier (self , query : str ) -> Tuple [Union [FieldValue , Keyword , Identifier ], str ]:
212212 query = query .strip ("\n " ).strip (" " ).strip ("\n " )
213213 if query .startswith (GroupType .L_PAREN ):
214214 return Identifier (token_type = GroupType .L_PAREN ), query [1 :]
@@ -240,7 +240,7 @@ def _validate_parentheses(tokens):
240240 raise QueryParenthesesException ()
241241 return True
242242
243- def tokenize (self , query : str ) -> List [Union [Field , Keyword , Identifier ]]:
243+ def tokenize (self , query : str ) -> List [Union [FieldValue , Keyword , Identifier ]]:
244244 tokenized = []
245245 while query :
246246 identifier , query = self ._get_identifier (query = query )
@@ -250,34 +250,28 @@ def tokenize(self, query: str) -> List[Union[Field, Keyword, Identifier]]:
250250
251251 @staticmethod
252252 def filter_tokens (tokens : List [TOKEN_TYPE ],
253- token_type : Union [Type [Field ], Type [Keyword ], Type [Identifier ]]) -> List [TOKEN_TYPE ]:
253+ token_type : Union [Type [FieldValue ], Type [Keyword ], Type [Identifier ]]) -> List [TOKEN_TYPE ]:
254254 return [token for token in tokens if isinstance (token , token_type )]
255255
256- def filter_function_tokens (self ,
257- tokens : List [Union [Field , Keyword , Identifier , Function , SortArg ]]) -> List [TOKEN_TYPE ]:
256+ def get_field_tokens_from_func_args (self ,
257+ args : List [Union [Field , FieldValue , Keyword , Identifier , Function , SortArg ]]
258+ ) -> List [Field ]:
258259 result = []
259- for token in tokens :
260- if isinstance (token , Field ):
261- result .append (token )
262- elif isinstance (token , Function ):
263- result .extend (self .filter_function_tokens (tokens = token .args ))
264- result .extend (self .filter_function_tokens (tokens = token .by_clauses ))
265- elif isinstance (token , SortArg ):
266- result .append (token .field )
260+ for arg in args :
261+ if isinstance (arg , Field ):
262+ result .append (arg )
263+ elif isinstance (arg , FieldValue ):
264+ result .append (arg .field )
265+ elif isinstance (arg , Function ):
266+ result .extend (self .get_field_tokens_from_func_args (args = arg .args ))
267+ result .extend (self .get_field_tokens_from_func_args (args = arg .by_clauses ))
268+ elif isinstance (arg , SortArg ):
269+ result .append (arg .field )
267270 return result
268271
269272 @staticmethod
270- def set_field_generic_names_map (tokens : List [Field ],
271- source_mappings : List [SourceMapping ],
272- platform_mappings : BasePlatformMappings ) -> None :
273+ def set_field_tokens_generic_names_map (tokens : List [Field ],
274+ source_mappings : List [SourceMapping ],
275+ default_mapping : SourceMapping ) -> None :
273276 for token in tokens :
274- generic_names_map = {
275- source_mapping .source_id : source_mapping .fields_mapping .get_generic_field_name (token .source_name )
276- for source_mapping in source_mappings
277- }
278- if DEFAULT_MAPPING_NAME not in generic_names_map :
279- default_source_mapping = platform_mappings .get_source_mapping (DEFAULT_MAPPING_NAME )
280- fields_mapping = default_source_mapping .fields_mapping
281- generic_names_map [DEFAULT_MAPPING_NAME ] = fields_mapping .get_generic_field_name (token .source_name )
282-
283- token .generic_names_map = generic_names_map
277+ token .set_generic_names_map (source_mappings , default_mapping )
0 commit comments