     Union,
 )
 
+from robot.parsing.lexer.tokens import Token
+from robot.utils.escaping import split_from_equals, unescape
+from robot.variables.finders import NOT_FOUND, NumberFinder
+from robot.variables.search import contains_variable, search_variable
 from robotcode.core.lsp.types import Position
 from robotcode.robot.utils import get_robot_version
-
-from ..utils.ast_utils import (
-    Token,
+from robotcode.robot.utils.ast import (
     iter_over_keyword_names_and_owners,
     range_from_token,
     strip_variable_token,
     tokenize_variables,
     whitespace_at_begin_of_token,
     whitespace_from_begin_of_token,
 )
+
 from .entities import (
     LibraryEntry,
     VariableDefinition,
@@ -56,8 +59,6 @@ async def get_run_keyword_keyworddoc_and_token_from_position(
         namespace: Namespace,
         position: Position,
     ) -> Tuple[Optional[Tuple[Optional[KeywordDoc], Token]], List[Token]]:
-        from robot.utils.escaping import unescape
-
         if keyword_doc is None or not keyword_doc.is_any_run_keyword():
             return None, argument_tokens
 
@@ -250,8 +251,6 @@ async def iter_expression_variables_from_token(
         skip_commandline_variables: bool = False,
         return_not_found: bool = False,
     ) -> AsyncIterator[Tuple[Token, VariableDefinition]]:
-        from robot.api.parsing import Token as RobotToken
-
         variable_started = False
         try:
             for toknum, tokval, (_, tokcol), _, _ in generate_tokens(StringIO(expression.value).readline):
@@ -264,7 +263,7 @@ async def iter_expression_variables_from_token(
                             skip_commandline_variables=skip_commandline_variables,
                             ignore_error=True,
                         )
-                        sub_token = RobotToken(
+                        sub_token = Token(
                             expression.type,
                             tokval,
                             expression.lineno,
@@ -291,12 +290,10 @@ async def iter_expression_variables_from_token(
 
     @staticmethod
     def remove_index_from_variable_token(token: Token) -> Tuple[Token, Optional[Token]]:
-        from robot.parsing.lexer import Token as RobotToken
-
         def escaped(i: int) -> bool:
-            return token.value[-i - 3 : -i - 2] == "\\"
+            return bool(token.value[-i - 3 : -i - 2] == "\\")
 
-        if token.type != RobotToken.VARIABLE or not token.value.endswith("]"):
+        if token.type != Token.VARIABLE or not token.value.endswith("]"):
             return (token, None)
 
         braces = 1
@@ -322,9 +319,9 @@ def escaped(i: int) -> bool:
             return (token, None)
 
         value = token.value[: -index - 2]
-        var = RobotToken(token.type, value, token.lineno, token.col_offset, token.error) if len(value) > 0 else None
-        rest = RobotToken(
-            RobotToken.ARGUMENT,
+        var = Token(token.type, value, token.lineno, token.col_offset, token.error) if len(value) > 0 else None
+        rest = Token(
+            Token.ARGUMENT,
             token.value[-index - 2 :],
             token.lineno,
             token.col_offset + len(value),
@@ -342,10 +339,8 @@ def _tokenize_variables(
         *,
         extra_types: Optional[Set[str]] = None,
     ) -> Iterator[Token]:
-        from robot.api.parsing import Token as RobotToken
-
         for t in tokenize_variables(token, identifiers, ignore_errors, extra_types=extra_types):
-            if t.type == RobotToken.VARIABLE:
+            if t.type == Token.VARIABLE:
                 var, rest = cls.remove_index_from_variable_token(t)
                 if var is not None:
                     yield var
@@ -364,12 +359,7 @@ async def iter_variables_from_token(
         skip_commandline_variables: bool = False,
         return_not_found: bool = False,
     ) -> AsyncIterator[Tuple[Token, VariableDefinition]]:
-        from robot.api.parsing import Token as RobotToken
-        from robot.variables.search import contains_variable, search_variable
-
         def is_number(name: str) -> bool:
-            from robot.variables.finders import NOT_FOUND, NumberFinder
-
             if name.startswith("$"):
                 finder = NumberFinder()
                 return bool(finder.find(name) != NOT_FOUND)
@@ -379,13 +369,13 @@ async def iter_token(
             to: Token, ignore_errors: bool = False
         ) -> AsyncIterator[Union[Token, Tuple[Token, VariableDefinition]]]:
             for sub_token in cls._tokenize_variables(to, ignore_errors=ignore_errors):
-                if sub_token.type == RobotToken.VARIABLE:
+                if sub_token.type == Token.VARIABLE:
                     base = sub_token.value[2:-1]
                     if base and not (base[0] == "{" and base[-1] == "}"):
                         yield sub_token
                     elif base:
                         async for v in cls.iter_expression_variables_from_token(
-                            RobotToken(
+                            Token(
                                 sub_token.type,
                                 base[1:-1],
                                 sub_token.lineno,
@@ -413,7 +403,7 @@ async def iter_token(
 
                     if contains_variable(base, "$@&%"):
                         async for sub_token_or_var in iter_token(
-                            RobotToken(
+                            Token(
                                 to.type,
                                 base,
                                 sub_token.lineno,
@@ -422,17 +412,17 @@ async def iter_token(
                             ignore_errors=ignore_errors,
                         ):
                             if isinstance(sub_token_or_var, Token):
-                                if sub_token_or_var.type == RobotToken.VARIABLE:
+                                if sub_token_or_var.type == Token.VARIABLE:
                                     yield sub_token_or_var
                             else:
                                 yield sub_token_or_var
 
-        if token.type == RobotToken.VARIABLE and token.value.endswith("="):
+        if token.type == Token.VARIABLE and token.value.endswith("="):
             match = search_variable(token.value, ignore_errors=True)
             if not match.is_assign(allow_assign_mark=True):
                 return
 
-            token = RobotToken(
+            token = Token(
                 token.type,
                 token.value[:-1].strip(),
                 token.lineno,
@@ -459,7 +449,7 @@ async def iter_token(
                     continue
 
                 if (
-                    sub_token.type == RobotToken.VARIABLE
+                    sub_token.type == Token.VARIABLE
                     and sub_token.value[:1] in "$@&%"
                     and sub_token.value[1:2] == "{"
                     and sub_token.value[-1:] == "}"
@@ -475,7 +465,7 @@ async def iter_token(
                         skip_commandline_variables=skip_commandline_variables,
                         ignore_error=True,
                     )
-                    sub_sub_token = RobotToken(sub_token.type, name, sub_token.lineno, sub_token.col_offset)
+                    sub_sub_token = Token(sub_token.type, name, sub_token.lineno, sub_token.col_offset)
                     if var is not None:
                         yield strip_variable_token(sub_sub_token), var
                         continue
@@ -529,8 +519,6 @@ def get_expression_statement_types(cls) -> Tuple[Type[Any]]:
 
     @classmethod
     def split_bdd_prefix(cls, namespace: Namespace, token: Token) -> Tuple[Optional[Token], Optional[Token]]:
-        from robot.parsing.lexer import Token as RobotToken
-
         bdd_token = None
 
         parts = token.value.split()
@@ -543,15 +531,15 @@ def split_bdd_prefix(cls, namespace: Namespace, token: Token) -> Tuple[Optional[
                 namespace.languages.bdd_prefixes if namespace.languages is not None else DEFAULT_BDD_PREFIXES
             ):
                 bdd_len = len(prefix)
-                bdd_token = RobotToken(
+                bdd_token = Token(
                     token.type,
                     token.value[:bdd_len],
                     token.lineno,
                     token.col_offset,
                     token.error,
                 )
 
-                token = RobotToken(
+                token = Token(
                     token.type,
                     token.value[bdd_len + 1 :],
                     token.lineno,
@@ -564,14 +552,12 @@ def split_bdd_prefix(cls, namespace: Namespace, token: Token) -> Tuple[Optional[
 
     @classmethod
     def strip_bdd_prefix(cls, namespace: Namespace, token: Token) -> Token:
-        from robot.parsing.lexer import Token as RobotToken
-
         if get_robot_version() < (6, 0):
             bdd_match = cls.BDD_TOKEN_REGEX.match(token.value)
             if bdd_match:
                 bdd_len = len(bdd_match.group(1))
 
-                token = RobotToken(
+                token = Token(
                     token.type,
                     token.value[bdd_len + 1 :],
                     token.lineno,
@@ -590,7 +576,7 @@ def strip_bdd_prefix(cls, namespace: Namespace, token: Token) -> Token:
                 namespace.languages.bdd_prefixes if namespace.languages is not None else DEFAULT_BDD_PREFIXES
             ):
                 bdd_len = len(prefix)
-                token = RobotToken(
+                token = Token(
                     token.type,
                     token.value[bdd_len + 1 :],
                     token.lineno,
@@ -637,9 +623,6 @@ def get_argument_info_at_position(
         token_at_position: Token,
         position: Position,
     ) -> Tuple[int, Optional[List[ArgumentInfo]], Optional[Token]]:
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.utils.escaping import split_from_equals
-
         argument_index = -1
         named_arg = False
 
@@ -656,35 +639,35 @@ def get_argument_info_at_position(
         token_at_position_index = tokens.index(token_at_position)
 
         if (
-            token_at_position.type in [RobotToken.EOL, RobotToken.SEPARATOR]
+            token_at_position.type in [Token.EOL, Token.SEPARATOR]
             and token_at_position_index > 2
-            and tokens[token_at_position_index - 1].type == RobotToken.CONTINUATION
+            and tokens[token_at_position_index - 1].type == Token.CONTINUATION
             and position.character < range_from_token(tokens[token_at_position_index - 1]).end.character + 2
         ):
             return -1, None, None
 
         token_at_position_index = tokens.index(token_at_position)
 
         argument_token_index = token_at_position_index
-        while argument_token_index >= 0 and tokens[argument_token_index].type != RobotToken.ARGUMENT:
+        while argument_token_index >= 0 and tokens[argument_token_index].type != Token.ARGUMENT:
             argument_token_index -= 1
 
         if (
-            token_at_position.type == RobotToken.EOL
+            token_at_position.type == Token.EOL
             and len(tokens) > 1
-            and tokens[argument_token_index - 1].type == RobotToken.CONTINUATION
+            and tokens[argument_token_index - 1].type == Token.CONTINUATION
         ):
             argument_token_index -= 2
-            while argument_token_index >= 0 and tokens[argument_token_index].type != RobotToken.ARGUMENT:
+            while argument_token_index >= 0 and tokens[argument_token_index].type != Token.ARGUMENT:
                 argument_token_index -= 1
 
-        arguments = [a for a in tokens if a.type == RobotToken.ARGUMENT]
+        arguments = [a for a in tokens if a.type == Token.ARGUMENT]
 
         argument_token: Optional[Token] = None
 
         if argument_token_index >= 0:
             argument_token = tokens[argument_token_index]
-            if argument_token is not None and argument_token.type == RobotToken.ARGUMENT:
+            if argument_token is not None and argument_token.type == Token.ARGUMENT:
                 argument_index = arguments.index(argument_token)
         else:
             argument_index = 0
@@ -705,19 +688,19 @@ def get_argument_info_at_position(
             r.end.character = r.start.character + whitespace_at_begin_of_token(token_at_position) - 3
             if not position.is_in_range(r, False):
                 argument_token_index += 2
-                if argument_token_index < len(tokens) and tokens[argument_token_index].type == RobotToken.ARGUMENT:
+                if argument_token_index < len(tokens) and tokens[argument_token_index].type == Token.ARGUMENT:
                     argument_token = tokens[argument_token_index]
 
         if (
             argument_index < 0
             or argument_token is not None
-            and argument_token.type == RobotToken.ARGUMENT
+            and argument_token.type == Token.ARGUMENT
             and argument_token.value.startswith(("@{", "&{"))
             and argument_token.value.endswith("}")
         ):
             return -1, kw_arguments, argument_token
 
-        if argument_token is not None and argument_token.type == RobotToken.ARGUMENT:
+        if argument_token is not None and argument_token.type == Token.ARGUMENT:
             arg_name_or_value, arg_value = split_from_equals(argument_token.value)
             if arg_value is not None:
                 old_argument_index = argument_index
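
Note on the change pattern: every hunk above does the same two things: the deferred, function-local imports of Robot Framework's Token (aliased as RobotToken) and of the escaping/variable helpers are hoisted to module level, and each RobotToken reference is rewritten to the directly imported Token. A minimal sketch of the before/after shape, using an illustrative helper name that is not taken from the diff:

# before: the lexer Token was imported lazily inside each function body
def is_variable_token(token):
    from robot.api.parsing import Token as RobotToken

    return token.type == RobotToken.VARIABLE

# after: one module-level import, referenced directly as Token
from robot.parsing.lexer.tokens import Token

def is_variable_token(token):
    return token.type == Token.VARIABLE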