Robot Framework
blocklexers.py
Go to the documentation of this file.
1 # Copyright 2008-2015 Nokia Networks
2 # Copyright 2016- Robot Framework Foundation
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 
16 from robot.utils import normalize_whitespace
17 
18 from .tokens import Token
19 from .statementlexers import (Lexer,
20  SettingSectionHeaderLexer, SettingLexer,
21  VariableSectionHeaderLexer, VariableLexer,
22  TestCaseSectionHeaderLexer,
23  TaskSectionHeaderLexer,
24  KeywordSectionHeaderLexer,
25  CommentSectionHeaderLexer, CommentLexer, ImplicitCommentLexer,
26  ErrorSectionHeaderLexer,
27  TestOrKeywordSettingLexer,
28  KeywordCallLexer,
29  IfHeaderLexer, ElseIfHeaderLexer, ElseHeaderLexer,
30  InlineIfHeaderLexer, EndLexer,
31  TryHeaderLexer, ExceptHeaderLexer, FinallyHeaderLexer,
32  ForHeaderLexer, WhileHeaderLexer,
33  ContinueLexer, BreakLexer, ReturnLexer)
34 
35 
37 
38 
# Core block-lexer machinery (the enclosing ``class BlockLexer(Lexer):`` line
# was lost in extraction).  Doubled identifiers from the extraction, such as
# ``lexerslexers`` and ``lexer_forlexer_for``, are restored to the single names
# actually defined below (``lexer_for``, ``lexer_classes``).

def __init__(self, ctx):
    """Store the lexing context and start with no child lexers."""
    super().__init__(ctx)
    self.lexers = []

def accepts_more(self, statement):
    """By default a block lexer keeps accepting statements; subclasses narrow this."""
    return True

def input(self, statement):
    """Feed ``statement`` to a child lexer and return the lexer that took it.

    The most recent child lexer is reused while it accepts more input;
    otherwise a new child is chosen via ``lexer_for`` and remembered.
    """
    if self.lexers and self.lexers[-1].accepts_more(statement):
        lexer = self.lexers[-1]
    else:
        lexer = self.lexer_for(statement)
        self.lexers.append(lexer)
    lexer.input(statement)
    return lexer

def lexer_for(self, statement):
    """Return a new child lexer whose class ``handles`` the statement.

    Raises:
        TypeError: if none of ``lexer_classes()`` handles ``statement``.
    """
    # NOTE(review): ``self.ctx`` is presumably set by the base ``Lexer`` --
    # the extraction artifact ``ctxctx`` was normalized to ``ctx``.
    for cls in self.lexer_classes():
        if cls.handles(statement, self.ctx):
            return cls(self.ctx)
    raise TypeError(f"{type(self).__name__} does not have lexer for "
                    f"statement {statement}.")

def lexer_classes(self):
    """Candidate child lexer classes in priority order; overridden by subclasses."""
    return ()

def lex(self):
    """Lex every statement with the child lexer that consumed it."""
    for lexer in self.lexers:
        lexer.lex()

def _lex_with_priority(self, priority):
    """Lex child lexers that are instances of ``priority`` first, then the rest."""
    for lexer in self.lexers:
        if isinstance(lexer, priority):
            lexer.lex()
    for lexer in self.lexers:
        if not isinstance(lexer, priority):
            lexer.lex()
77 
78 
80 
def lex(self):
    """Lex setting sections before all other sections.

    Fixed the garbled call name ``_lex_with_priority_lex_with_priority``
    to ``_lex_with_priority`` as defined on the block-lexer base.
    """
    self._lex_with_priority(priority=SettingSectionLexer)

def lexer_classes(self):
    """Section lexers tried in order; the implicit comment section is the fallback."""
    return (SettingSectionLexer, VariableSectionLexer,
            TestCaseSectionLexer, TaskSectionLexer,
            KeywordSectionLexer, CommentSectionLexer,
            ErrorSectionLexer, ImplicitCommentSectionLexer)
89 
90 
92 
def accepts_more(self, statement):
    """A section continues until the next section header (a '*...' token) starts."""
    first_value = statement[0].value
    return not first_value.startswith('*')
95 
96 
98 
@classmethod
def handles(cls, statement, ctx):
    """The context decides whether ``statement`` opens a setting section."""
    return ctx.setting_section(statement)

def lexer_classes(self):
    """Child lexers: section header first, then plain setting statements."""
    return (SettingSectionHeaderLexer,
            SettingLexer)
105 
106 
108 
@classmethod
def handles(cls, statement, ctx):
    """The context decides whether ``statement`` opens a variable section."""
    return ctx.variable_section(statement)

def lexer_classes(self):
    """Child lexers: section header first, then variable definitions."""
    return (VariableSectionHeaderLexer,
            VariableLexer)
115 
116 
118 
@classmethod
def handles(cls, statement, ctx):
    """The context decides whether ``statement`` opens a test case section."""
    return ctx.test_case_section(statement)

def lexer_classes(self):
    """Child lexers: section header first, then individual test cases."""
    return (TestCaseSectionHeaderLexer,
            TestCaseLexer)
125 
126 
128 
@classmethod
def handles(cls, statement, ctx):
    """The context decides whether ``statement`` opens a task section."""
    return ctx.task_section(statement)

def lexer_classes(self):
    """Child lexers: task header first; tasks themselves reuse TestCaseLexer."""
    return (TaskSectionHeaderLexer,
            TestCaseLexer)
135 
136 
138 
@classmethod
def handles(cls, statement, ctx):
    """The context decides whether ``statement`` opens a keyword section."""
    return ctx.keyword_section(statement)

def lexer_classes(self):
    """Child lexers: section header first, then individual keywords."""
    return (KeywordSectionHeaderLexer,
            KeywordLexer)
145 
146 
148 
@classmethod
def handles(cls, statement, ctx):
    """The context decides whether ``statement`` opens a comment section."""
    return ctx.comment_section(statement)

def lexer_classes(self):
    """Child lexers: section header first, then plain comment lines."""
    return (CommentSectionHeaderLexer,
            CommentLexer)
155 
156 
158 
@classmethod
def handles(cls, statement, ctx):
    """Always handles: content before any explicit section is implicit comments."""
    return True

def lexer_classes(self):
    """A single child lexer treats every statement as an implicit comment."""
    return (ImplicitCommentLexer,)
165 
166 
168 
@classmethod
def handles(cls, statement, ctx):
    """Catch-all for '*'-prefixed headers no other section lexer recognized."""
    if not statement:
        # Preserve the original ``statement and ...`` semantics: an empty
        # statement is returned as-is (falsy), not converted to a bool.
        return statement
    return statement[0].value.startswith('*')

def lexer_classes(self):
    """Child lexers: the error header first, then plain comment lines."""
    return (ErrorSectionHeaderLexer,
            CommentLexer)
175 
176 
# Body of the shared test/keyword lexer.  De-duplicated extraction artifacts:
# ``name_typename_type`` -> ``name_type``, the tripled ``_name_seen`` name, and
# ``_handle_name_or_indentation_handle_name_or_indentation`` -> the method
# actually defined below.

name_type = NotImplemented    # Token type used for the name token; set by subclasses.
_name_seen = False            # Becomes True once the name token has been consumed.

def accepts_more(self, statement):
    """Statements belong to this test/keyword while they start with an empty (indentation) token."""
    return not statement[0].value

def input(self, statement):
    """Strip the name or leading indentation, then delegate what remains."""
    self._handle_name_or_indentation(statement)
    if statement:
        super().input(statement)

def _handle_name_or_indentation(self, statement):
    """Consume the name token on the first statement, or drop indentation tokens.

    The first token ever seen is typed as ``name_type``; on later statements
    leading empty-value tokens are popped and untyped so they are ignored.
    """
    if not self._name_seen:
        token = statement.pop(0)
        token.type = self.name_type
        if statement:
            token._add_eos_after = True
        self._name_seen = True
    else:
        while statement and not statement[0].value:
            statement.pop(0).type = None    # These tokens will be ignored

def lexer_classes(self):
    """Statement-level lexers tried in order for the test/keyword body."""
    return (TestOrKeywordSettingLexer, BreakLexer, ContinueLexer,
            ForLexer, InlineIfLexer, IfLexer, ReturnLexer, TryLexer,
            WhileLexer, KeywordCallLexer)
207 
208 
# Body of the test-case lexer.  Fixes from the garbled extraction: the stray
# trailing comma in ``def lex(self,):`` and the doubled call name
# ``_lex_with_priority_lex_with_priority``.

name_type = Token.TESTCASE_NAME    # Name tokens of a test are typed as TESTCASE_NAME.

def __init__(self, ctx):
    """Initialize with a test-case specific context derived from ``ctx``."""
    super().__init__(ctx.test_case_context())

def lex(self):
    """Lex setting statements of the test first, then everything else."""
    self._lex_with_priority(priority=TestOrKeywordSettingLexer)
218 
219 
# Body of the user-keyword lexer.

name_type = Token.KEYWORD_NAME    # Name tokens of a keyword are typed as KEYWORD_NAME.

def __init__(self, ctx):
    """Initialize with a keyword-specific context derived from ``ctx``."""
    keyword_ctx = ctx.keyword_context()
    super().__init__(keyword_ctx)
225 
226 
228 
# Body of the nested-block (FOR/IF/TRY/WHILE) lexer base.  The extraction
# artifact ``_block_level_block_level`` is restored to ``_block_level``;
# every use of the attribute lives in these three methods.

def __init__(self, ctx):
    """Start outside any block: nesting depth zero."""
    super().__init__(ctx)
    self._block_level = 0    # Number of currently open FOR/IF/TRY/WHILE blocks.

def accepts_more(self, statement):
    """The block continues until every opened block has been closed by END."""
    return self._block_level > 0

def input(self, statement):
    """Delegate to child lexers while tracking block nesting depth."""
    lexer = super().input(statement)
    if isinstance(lexer, (ForHeaderLexer, IfHeaderLexer, TryHeaderLexer,
                          WhileHeaderLexer)):
        self._block_level += 1
    if isinstance(lexer, EndLexer):
        self._block_level -= 1
243 
244 
246 
@classmethod
def handles(cls, statement, ctx):
    """A FOR block starts wherever a FOR header statement starts."""
    return ForHeaderLexer.handles(statement, ctx)

def lexer_classes(self):
    """Lexers valid inside a FOR block, header first, END among them."""
    return (ForHeaderLexer, InlineIfLexer, IfLexer, TryLexer, WhileLexer,
            EndLexer, ReturnLexer, ContinueLexer, BreakLexer,
            KeywordCallLexer)
254 
255 
257 
@classmethod
def handles(cls, statement, ctx):
    """A WHILE block starts wherever a WHILE header statement starts."""
    return WhileHeaderLexer.handles(statement, ctx)

def lexer_classes(self):
    """Lexers valid inside a WHILE block, header first, END among them."""
    return (WhileHeaderLexer, ForLexer, InlineIfLexer, IfLexer, TryLexer,
            EndLexer, ReturnLexer, ContinueLexer, BreakLexer,
            KeywordCallLexer)
265 
266 
268 
@classmethod
def handles(cls, statement, ctx):
    """An IF block starts wherever an IF header statement starts."""
    return IfHeaderLexer.handles(statement, ctx)

def lexer_classes(self):
    """Lexers valid inside an IF block, including ELSE IF/ELSE branches."""
    return (InlineIfLexer, IfHeaderLexer, ElseIfHeaderLexer, ElseHeaderLexer,
            ForLexer, TryLexer, WhileLexer, EndLexer, ReturnLexer,
            ContinueLexer, BreakLexer, KeywordCallLexer)
277 
278 
280 
# Body of the inline-IF lexer.  Fix from the garbled extraction: the doubled
# call name ``self._split_split`` now calls the ``_split`` method defined below.

@classmethod
def handles(cls, statement, ctx):
    """An inline IF needs more than header + condition on the same row."""
    if len(statement) <= 2:
        return False
    return InlineIfHeaderLexer.handles(statement, ctx)

def accepts_more(self, statement):
    """An inline IF is always exactly one statement."""
    return False

def lexer_classes(self):
    """Lexers for the pieces an inline IF statement is split into."""
    return (InlineIfHeaderLexer, ElseIfHeaderLexer, ElseHeaderLexer,
            ReturnLexer, ContinueLexer, BreakLexer, KeywordCallLexer)

def input(self, statement):
    """Split the single inline-IF statement into branches and feed each one."""
    for part in self._split(statement):
        if part:
            super().input(part)
    return self

def _split(self, statement):
    """Yield the IF / ELSE IF / ELSE parts of an inline IF statement.

    Branch boundaries request end-of-statement markers via the tokens'
    ``_add_eos_before`` / ``_add_eos_after`` flags; the very last token of
    the statement never gets ``_add_eos_after``.
    """
    current = []
    expect_condition = False
    for token in statement:
        if expect_condition:
            if token is not statement[-1]:
                token._add_eos_after = True
            current.append(token)
            yield current
            current = []
            expect_condition = False
        elif token.value == 'IF':
            current.append(token)
            expect_condition = True
        elif normalize_whitespace(token.value) == 'ELSE IF':
            token._add_eos_before = True
            yield current
            current = [token]
            expect_condition = True
        elif token.value == 'ELSE':
            token._add_eos_before = True
            if token is not statement[-1]:
                token._add_eos_after = True
            yield current
            current = []
            yield [token]
        else:
            current.append(token)
    yield current
329 
330 
332 
@classmethod
def handles(cls, statement, ctx):
    """A TRY block starts wherever a TRY header statement starts.

    Calls ``handles`` on the class directly, consistently with the sibling
    block lexers (ForLexer/WhileLexer/IfLexer); the original needlessly
    instantiated ``TryHeaderLexer(ctx)`` before the call.
    """
    return TryHeaderLexer.handles(statement, ctx)

def lexer_classes(self):
    """Lexers valid inside a TRY block, including EXCEPT/ELSE/FINALLY branches."""
    return (TryHeaderLexer, ExceptHeaderLexer, ElseHeaderLexer,
            FinallyHeaderLexer, ForLexer, InlineIfLexer, IfLexer, WhileLexer,
            EndLexer, ReturnLexer, BreakLexer, ContinueLexer,
            KeywordCallLexer)
def __init__(self, ctx)
:type ctx: :class:robot.parsing.lexer.context.FileContext
Definition: blocklexers.py:39
def handles(cls, statement, ctx)
Definition: blocklexers.py:248
def handles(cls, statement, ctx)
Definition: blocklexers.py:270
def __init__(self, ctx)
:type ctx: :class:robot.parsing.lexer.context.FileContext
Definition: blocklexers.py:223
def __init__(self, ctx)
:type ctx: :class:robot.parsing.lexer.context.FileContext
Definition: blocklexers.py:229
def __init__(self, ctx)
:type ctx: :class:robot.parsing.lexer.context.TestCaseFileContext
Definition: blocklexers.py:213
def handles(cls, statement, ctx)
Definition: blocklexers.py:334
def normalize_whitespace(string)
Definition: normalizing.py:45