from itertools import chain

from robot.errors import DataError
from robot.utils import get_error_message, FileReader

from .blocklexers import FileLexer
from .context import InitFileContext, TestCaseFileContext, ResourceFileContext
from .tokenizer import Tokenizer
from .tokens import EOS, END, Token
def get_tokens(source, data_only=False, tokenize_variables=False, lang=None):
    """Parses the given source to tokens."""
    lexer = Lexer(TestCaseFileContext(lang=lang), data_only, tokenize_variables)
    lexer.input(source)
    return lexer.get_tokens()


def get_resource_tokens(source, data_only=False, tokenize_variables=False, lang=None):
    """Parses the given source to resource file tokens."""
    lexer = Lexer(ResourceFileContext(lang=lang), data_only, tokenize_variables)
    lexer.input(source)
    return lexer.get_tokens()


def get_init_tokens(source, data_only=False, tokenize_variables=False, lang=None):
    """Parses the given source to init file tokens."""
    lexer = Lexer(InitFileContext(lang=lang), data_only, tokenize_variables)
    lexer.input(source)
    return lexer.get_tokens()
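
A minimal usage sketch (my own illustration, not part of this module; these entry points are re-exported through the public robot.api.parsing package in Robot Framework 4.0 and newer, and the test data below is made up):

from robot.api.parsing import get_tokens

# Illustrative in-memory data; get_tokens() also accepts a path or an
# already opened file object (both are handled by FileReader below).
data = """\
*** Test Cases ***
Example
    Log    Hello, world!
"""

# data_only=True drops separators and comments, matching the
# ignored_types handling in Lexer._get_tokens() further down.
for token in get_tokens(data, data_only=True):
    print(token.lineno, token.type, repr(token.value))
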
class Lexer:

    def __init__(self, ctx, data_only=False, tokenize_variables=False):
        self.lexer = FileLexer(ctx)
        self.statements = []
        self.data_only = data_only
        self.tokenize_variables = tokenize_variables

    def input(self, source):
        for statement in Tokenizer().tokenize(self._read(source), self.data_only):
            self.statements.append(statement)
            if self.data_only:
                data = statement
            else:
                # Separators, comments, etc. got their types already in the
                # tokenizer; only data tokens (type None) are passed onwards.
                data = [t for t in statement if t.type is None]
            self.lexer.input(data)

    def _read(self, source):
        try:
            with FileReader(source, accept_text=True) as reader:
                return reader.read()
        except Exception:
            raise DataError(get_error_message())
    def get_tokens(self):
        self.lexer.lex()
        statements = self.statements
        if not self.data_only:
            # Trailing comments and empty lines are split into separate
            # statements so that they do not belong to the preceding block.
            statements = chain.from_iterable(
                self._split_trailing_commented_and_empty_lines(s)
                for s in statements
            )
        tokens = self._get_tokens(statements)
        if self.tokenize_variables:
            tokens = self._tokenize_variables(tokens)
        return tokens
    def _get_tokens(self, statements):
        if self.data_only:
            ignored_types = {None, Token.COMMENT_HEADER, Token.COMMENT}
        else:
            ignored_types = {None}
        inline_if_type = Token.INLINE_IF
        for statement in statements:
            last = None
            inline_if = False
            for token in statement:
                token_type = token.type
                if token_type in ignored_types:
                    continue
                if token._add_eos_before and not (last and last._add_eos_after):
                    yield EOS.from_token(token, before=True)
                yield token
                if token._add_eos_after:
                    yield EOS.from_token(token)
                if token_type == inline_if_type:
                    inline_if = True
                last = token
            if last and not last._add_eos_after:
                yield EOS.from_token(last)
            if inline_if:
                # Inline IF has no explicit END in the source, so a
                # virtual one is added, followed by a statement separator.
                yield END.from_token(last, virtual=True)
                yield EOS.from_token(last)
    def _split_trailing_commented_and_empty_lines(self, statement):
        lines = self._split_to_lines(statement)
        commented_or_empty = []
        for line in reversed(lines):
            if not self._is_commented_or_empty(line):
                break
            commented_or_empty.append(line)
        if not commented_or_empty:
            return [statement]
        lines = lines[:-len(commented_or_empty)]
        statement = list(chain.from_iterable(lines))
        return [statement] + list(reversed(commented_or_empty))
    def _split_to_lines(self, statement):
        lines = []
        current = []
        for token in statement:
            current.append(token)
            if token.type == Token.EOL:
                lines.append(current)
                current = []
        if current:
            lines.append(current)
        return lines
    def _is_commented_or_empty(self, line):
        separator_or_ignore = (Token.SEPARATOR, None)
        comment_or_eol = (Token.COMMENT, Token.EOL)
        for token in line:
            # The first non-separator token decides the line type.
            if token.type not in separator_or_ignore:
                return token.type in comment_or_eol
        return False
    def _tokenize_variables(self, tokens):
        for token in tokens:
            for t in token.tokenize_variables():
                yield t
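
To illustrate the virtual END handling in _get_tokens() above, a small sketch (assumes Robot Framework 5.0 or newer, where inline IF and Token.INLINE_IF exist; the test data is made up):

from robot.api.parsing import Token, get_tokens

data = """\
*** Test Cases ***
Inline
    IF    $condition    Log    message
"""

types = [token.type for token in get_tokens(data, data_only=True)]
# The inline IF statement has no explicit END in the source, yet an
# END token appears in the stream because the lexer appends a virtual one.
assert Token.INLINE_IF in types
assert Token.END in types
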
Definitions in this module:

def get_tokens(source, data_only=False, tokenize_variables=False, lang=None)
    Parses the given source to tokens.

def get_resource_tokens(source, data_only=False, tokenize_variables=False, lang=None)
    Parses the given source to resource file tokens.

def get_init_tokens(source, data_only=False, tokenize_variables=False, lang=None)
    Parses the given source to init file tokens.

class Lexer
    def __init__(self, ctx, data_only=False, tokenize_variables=False)
    def _get_tokens(self, statements)
    def _split_trailing_commented_and_empty_lines(self, statement)
    def _split_to_lines(self, statement)
    def _is_commented_or_empty(self, line)
    def _tokenize_variables(self, tokens)

From robot.utils:

def get_error_message()
    Returns the error message of the last occurred exception.
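
Finally, a sketch of the tokenize_variables option, which is handled by Lexer._tokenize_variables() above (the resource data is my own illustration; assumes Robot Framework 4.0+):

from robot.api.parsing import Token, get_resource_tokens

data = """\
*** Keywords ***
Greet
    Log    Hello, ${name}!
"""

# With tokenize_variables=True, tokens containing variables are split via
# Token.tokenize_variables(): here the single ARGUMENT 'Hello, ${name}!'
# becomes ARGUMENT 'Hello, ', VARIABLE '${name}', ARGUMENT '!'.
for token in get_resource_tokens(data, data_only=True, tokenize_variables=True):
    if token.type in (Token.ARGUMENT, Token.VARIABLE):
        print(token.type, repr(token.value))
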