21 from robot.utils import is_list_like, normalize_whitespace, seq2str, split_from_equals
22 from robot.variables import is_scalar_assign, is_dict_variable, search_variable
24 from ..lexer
import Token
37 _fields = (
'type',
'tokens')
41 _attributes = (
'lineno',
'col_offset',
'end_lineno',
'end_col_offset',
'errors')
45 _statement_handlers = {}
@property
def lineno(self):
    """Line number of the first token, or -1 if the statement has no tokens."""
    return self.tokens[0].lineno if self.tokens else -1
@property
def col_offset(self):
    """Column offset of the first token, or -1 if the statement has no tokens."""
    return self.tokens[0].col_offset if self.tokens else -1
@property
def end_lineno(self):
    """Line number of the last token, or -1 if the statement has no tokens."""
    return self.tokens[-1].lineno if self.tokens else -1
@property
def end_col_offset(self):
    """End column offset of the last token, or -1 if the statement has no tokens."""
    return self.tokens[-1].end_col_offset if self.tokens else -1
77 types = subcls.handles_types
or (subcls.type,)
86 if token.type
in handlers:
87 return handlers[token.type](tokens)
88 if any(token.type == Token.ASSIGN
for token
in tokens):
108 raise NotImplementedError
@property
def data_tokens(self):
    """Tokens excluding separators, comments and other non-data tokens.

    Filters against ``Token.NON_DATA_TOKENS`` — assumes that constant is a
    container of token types (TODO confirm against the lexer's ``Token``).
    """
    return [t for t in self.tokens if t.type not in Token.NON_DATA_TOKENS]
def get_token(self, *types):
    """Return the first token having any of the given types, or None."""
    for token in self.tokens:
        if token.type in types:
            return token
    return None
def get_tokens(self, *types):
    """Return all tokens having any of the given types, in order."""
    return [t for t in self.tokens if t.type in types]
def get_value(self, type, default=None):
    """Return the value of the first token with the given type.

    Falls back to ``default`` when no such token exists.
    """
    token = self.get_token(type)
    return token.value if token else default
def get_values(self, *types):
    """Return values of all tokens having any of the given types as a tuple."""
    return tuple(t.value for t in self.tokens if t.type in types)
149 for token
in self.
tokenstokens:
151 if token.type == Token.EOL:
def __iter__(self):
    """Iterate over the statement's tokens."""
    return iter(self.tokens)
def __len__(self):
    """Number of tokens in the statement."""
    return len(self.tokens)
def __getitem__(self, item):
    """Index or slice directly into the statement's tokens."""
    return self.tokens[item]
def __repr__(self):
    """Debug representation showing the class name, tokens and any errors."""
    # Errors are only shown when present to keep the common case compact.
    errors = '' if not self.errors else ', errors=%s' % list(self.errors)
    return '%s(tokens=%s%s)' % (type(self).__name__, list(self.tokens), errors)
185 if t.lineno != lineno:
190 return [
' '.join(line)
for line
in lines]
193 last_index = len(lines) - 1
194 for index, line
in enumerate(lines):
def _escaped_or_has_newline(line):
    """True if *line* ends with an odd number of backslashes (optionally
    followed by ``n``), i.e. the final character is escaped or forms a
    literal ``\\n``.

    NOTE(review): the enclosing helper's original name is not visible in
    this extraction — confirm against upstream before relying on the name.
    Returns ``None`` (falsy) when the pattern does not match.
    """
    match = re.search(r'(\\+)n?$', line)
    return match and len(match.group(1)) % 2 == 1
210 values = self.
get_valuesget_values(Token.NAME, Token.ARGUMENT)
211 if values
and values[0].upper() !=
'NONE':
222 return self.
get_valuesget_values(Token.ARGUMENT)
231 return self.
get_valueget_value(Token.NAME)
237 return self.
get_valuesget_values(Token.ARGUMENT)
242 handles_types = (Token.SETTING_HEADER, Token.VARIABLE_HEADER,
243 Token.TESTCASE_HEADER, Token.TASK_HEADER,
244 Token.KEYWORD_HEADER, Token.COMMENT_HEADER)
249 names = (
'Settings',
'Variables',
'Test Cases',
'Tasks',
250 'Keywords',
'Comments')
252 if not name.startswith(
'*'):
253 name =
'*** %s ***' % name
@classmethod
def from_params(cls, name, args=(), alias=None, separator=FOUR_SPACES, eol=EOL):
    """Create a ``Library`` import statement token list.

    ``args`` become ARGUMENT tokens and a ``WITH NAME`` clause is appended
    when ``alias`` is given.
    """
    tokens = [Token(Token.LIBRARY, 'Library'),
              Token(Token.SEPARATOR, separator),
              Token(Token.NAME, name)]
    for arg in args:
        tokens.extend([Token(Token.SEPARATOR, separator),
                       Token(Token.ARGUMENT, arg)])
    if alias is not None:
        tokens.extend([Token(Token.SEPARATOR, separator),
                       Token(Token.WITH_NAME),
                       Token(Token.SEPARATOR, separator),
                       Token(Token.NAME, alias)])
    tokens.append(Token(Token.EOL, eol))
    return cls(tokens)
298 return self.
get_valueget_value(Token.NAME)
304 return self.
get_valuesget_values(Token.ARGUMENT)
310 with_name = self.
get_tokenget_token(Token.WITH_NAME)
311 return self.
get_tokensget_tokens(Token.NAME)[-1].value
if with_name
else None
316 type = Token.RESOURCE
321 Token(Token.RESOURCE,
'Resource'),
322 Token(Token.SEPARATOR, separator),
323 Token(Token.NAME, name),
324 Token(Token.EOL, eol)
331 return self.
get_valueget_value(Token.NAME)
336 type = Token.VARIABLES
339 def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL):
340 tokens = [
Token(Token.VARIABLES,
'Variables'),
341 Token(Token.SEPARATOR, separator),
342 Token(Token.NAME, name)]
344 tokens.extend([
Token(Token.SEPARATOR, separator),
345 Token(Token.ARGUMENT, arg)])
346 tokens.append(
Token(Token.EOL, eol))
353 return self.
get_valueget_value(Token.NAME)
359 return self.
get_valuesget_values(Token.ARGUMENT)
364 type = Token.DOCUMENTATION
367 def from_params(cls, value, indent=FOUR_SPACES, separator=FOUR_SPACES,
368 eol=EOL, settings_section=True):
370 tokens = [
Token(Token.DOCUMENTATION,
'Documentation'),
371 Token(Token.SEPARATOR, separator)]
373 tokens = [
Token(Token.SEPARATOR, indent),
374 Token(Token.DOCUMENTATION,
'[Documentation]'),
375 Token(Token.SEPARATOR, separator)]
376 multiline_separator =
' ' * (len(tokens[-2].value) + len(separator) - 3)
377 doc_lines = value.splitlines()
379 tokens.extend([
Token(Token.ARGUMENT, doc_lines[0]),
380 Token(Token.EOL, eol)])
381 for line
in doc_lines[1:]:
382 if not settings_section:
383 tokens.append(
Token(Token.SEPARATOR, indent))
384 tokens.append(
Token(Token.CONTINUATION))
386 tokens.extend([
Token(Token.SEPARATOR, multiline_separator),
387 Token(Token.ARGUMENT, line)])
388 tokens.append(
Token(Token.EOL, eol))
395 tokens = self.
get_tokensget_tokens(Token.ARGUMENT)
401 type = Token.METADATA
@classmethod
def from_params(cls, name, value, separator=FOUR_SPACES, eol=EOL):
    """Create a ``Metadata`` statement token list.

    ``value`` may be multiline; each additional line becomes a
    CONTINUATION row with its own ARGUMENT token.
    """
    tokens = [Token(Token.METADATA, 'Metadata'),
              Token(Token.SEPARATOR, separator),
              Token(Token.NAME, name)]
    metadata_lines = value.splitlines()
    tokens.extend([Token(Token.SEPARATOR, separator),
                   Token(Token.ARGUMENT, metadata_lines[0]),
                   Token(Token.EOL, eol)])
    for line in metadata_lines[1:]:
        tokens.extend([Token(Token.CONTINUATION),
                       Token(Token.SEPARATOR, separator),
                       Token(Token.ARGUMENT, line),
                       Token(Token.EOL, eol)])
    return cls(tokens)
424 return self.
get_valueget_value(Token.NAME)
430 tokens = self.
get_tokensget_tokens(Token.ARGUMENT)
436 type = Token.FORCE_TAGS
440 tokens = [
Token(Token.FORCE_TAGS,
'Force Tags')]
442 tokens.extend([
Token(Token.SEPARATOR, separator),
443 Token(Token.ARGUMENT, tag)])
444 tokens.append(
Token(Token.EOL, eol))
450 type = Token.DEFAULT_TAGS
454 tokens = [
Token(Token.DEFAULT_TAGS,
'Default Tags')]
456 tokens.extend([
Token(Token.SEPARATOR, separator),
457 Token(Token.ARGUMENT, tag)])
458 tokens.append(
Token(Token.EOL, eol))
464 type = Token.KEYWORD_TAGS
468 tokens = [
Token(Token.KEYWORD_TAGS,
'Keyword Tags')]
470 tokens.extend([
Token(Token.SEPARATOR, separator),
471 Token(Token.ARGUMENT, tag)])
472 tokens.append(
Token(Token.EOL, eol))
478 type = Token.SUITE_SETUP
481 def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL):
482 tokens = [
Token(Token.SUITE_SETUP,
'Suite Setup'),
483 Token(Token.SEPARATOR, separator),
484 Token(Token.NAME, name)]
486 tokens.extend([
Token(Token.SEPARATOR, separator),
487 Token(Token.ARGUMENT, arg)])
488 tokens.append(
Token(Token.EOL, eol))
494 type = Token.SUITE_TEARDOWN
497 def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL):
498 tokens = [
Token(Token.SUITE_TEARDOWN,
'Suite Teardown'),
499 Token(Token.SEPARATOR, separator),
500 Token(Token.NAME, name)]
502 tokens.extend([
Token(Token.SEPARATOR, separator),
503 Token(Token.ARGUMENT, arg)])
504 tokens.append(
Token(Token.EOL, eol))
510 type = Token.TEST_SETUP
513 def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL):
514 tokens = [
Token(Token.TEST_SETUP,
'Test Setup'),
515 Token(Token.SEPARATOR, separator),
516 Token(Token.NAME, name)]
518 tokens.extend([
Token(Token.SEPARATOR, separator),
519 Token(Token.ARGUMENT, arg)])
520 tokens.append(
Token(Token.EOL, eol))
526 type = Token.TEST_TEARDOWN
529 def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL):
530 tokens = [
Token(Token.TEST_TEARDOWN,
'Test Teardown'),
531 Token(Token.SEPARATOR, separator),
532 Token(Token.NAME, name)]
534 tokens.extend([
Token(Token.SEPARATOR, separator),
535 Token(Token.ARGUMENT, arg)])
536 tokens.append(
Token(Token.EOL, eol))
542 type = Token.TEST_TEMPLATE
547 Token(Token.TEST_TEMPLATE,
'Test Template'),
548 Token(Token.SEPARATOR, separator),
549 Token(Token.NAME, value),
550 Token(Token.EOL, eol)
556 type = Token.TEST_TIMEOUT
561 Token(Token.TEST_TIMEOUT,
'Test Timeout'),
562 Token(Token.SEPARATOR, separator),
563 Token(Token.ARGUMENT, value),
564 Token(Token.EOL, eol)
570 type = Token.VARIABLE
574 def from_params(cls, name, value, separator=FOUR_SPACES, eol=EOL):
576 tokens = [
Token(Token.VARIABLE, name)]
578 tokens.extend([
Token(Token.SEPARATOR, separator),
579 Token(Token.ARGUMENT, value)])
580 tokens.append(
Token(Token.EOL, eol))
587 name = self.
get_valueget_value(Token.VARIABLE)
588 if name.endswith(
'='):
589 return name[:-1].rstrip()
596 return self.
get_valuesget_values(Token.ARGUMENT)
599 name = self.
get_valueget_value(Token.VARIABLE)
601 if not match.is_assign(allow_assign_mark=
True):
602 self.
errorserrors += (
"Invalid variable name '%s'." % name,)
603 if match.is_dict_assign(allow_assign_mark=
True):
607 for item
in self.
get_valuesget_values(Token.ARGUMENT):
610 "Invalid dictionary variable item '%s'. "
611 "Items must use 'name=value' syntax or be dictionary "
612 "variables themselves." % item,
622 type = Token.TESTCASE_NAME
626 tokens = [
Token(Token.TESTCASE_NAME, name)]
628 tokens.append(
Token(Token.EOL, eol))
635 return self.
get_valueget_value(Token.TESTCASE_NAME)
640 type = Token.KEYWORD_NAME
644 tokens = [
Token(Token.KEYWORD_NAME, name)]
646 tokens.append(
Token(Token.EOL, eol))
653 return self.
get_valueget_value(Token.KEYWORD_NAME)
661 def from_params(cls, name, args=(), indent=FOUR_SPACES, separator=FOUR_SPACES,
663 tokens = [
Token(Token.SEPARATOR, indent),
664 Token(Token.SETUP,
'[Setup]'),
665 Token(Token.SEPARATOR, separator),
666 Token(Token.NAME, name)]
668 tokens.extend([
Token(Token.SEPARATOR, separator),
669 Token(Token.ARGUMENT, arg)])
670 tokens.append(
Token(Token.EOL, eol))
676 type = Token.TEARDOWN
679 def from_params(cls, name, args=(), indent=FOUR_SPACES, separator=FOUR_SPACES,
681 tokens = [
Token(Token.SEPARATOR, indent),
682 Token(Token.TEARDOWN,
'[Teardown]'),
683 Token(Token.SEPARATOR, separator),
684 Token(Token.NAME, name)]
686 tokens.extend([
Token(Token.SEPARATOR, separator),
687 Token(Token.ARGUMENT, arg)])
688 tokens.append(
Token(Token.EOL, eol))
697 def from_params(cls, values, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
698 tokens = [
Token(Token.SEPARATOR, indent),
699 Token(Token.TAGS,
'[Tags]')]
701 tokens.extend([
Token(Token.SEPARATOR, separator),
702 Token(Token.ARGUMENT, tag)])
703 tokens.append(
Token(Token.EOL, eol))
709 type = Token.TEMPLATE
712 def from_params(cls, value, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
714 Token(Token.SEPARATOR, indent),
715 Token(Token.TEMPLATE,
'[Template]'),
716 Token(Token.SEPARATOR, separator),
717 Token(Token.NAME, value),
718 Token(Token.EOL, eol)
727 def from_params(cls, value, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
729 Token(Token.SEPARATOR, indent),
730 Token(Token.TIMEOUT,
'[Timeout]'),
731 Token(Token.SEPARATOR, separator),
732 Token(Token.ARGUMENT, value),
733 Token(Token.EOL, eol)
739 type = Token.ARGUMENTS
742 def from_params(cls, args, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
743 tokens = [
Token(Token.SEPARATOR, indent),
744 Token(Token.ARGUMENTS,
'[Arguments]')]
746 tokens.extend([
Token(Token.SEPARATOR, separator),
747 Token(Token.ARGUMENT, arg)])
748 tokens.append(
Token(Token.EOL, eol))
753 UserKeywordArgumentParser(error_reporter=errors.append).parse(self.
valuesvaluesvalues)
762 def from_params(cls, args, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
763 tokens = [
Token(Token.SEPARATOR, indent),
764 Token(Token.RETURN,
'[Return]')]
766 tokens.extend([
Token(Token.SEPARATOR, separator),
767 Token(Token.ARGUMENT, arg)])
768 tokens.append(
Token(Token.EOL, eol))
777 def from_params(cls, name, assign=(), args=(), indent=FOUR_SPACES,
778 separator=FOUR_SPACES, eol=EOL):
779 tokens = [
Token(Token.SEPARATOR, indent)]
780 for assignment
in assign:
781 tokens.extend([
Token(Token.ASSIGN, assignment),
782 Token(Token.SEPARATOR, separator)])
783 tokens.append(
Token(Token.KEYWORD, name))
785 tokens.extend([
Token(Token.SEPARATOR, separator),
786 Token(Token.ARGUMENT, arg)])
787 tokens.append(
Token(Token.EOL, eol))
794 return self.
get_valueget_value(Token.KEYWORD)
800 return self.
get_valuesget_values(Token.ARGUMENT)
806 return self.
get_valuesget_values(Token.ASSIGN)
811 type = Token.ARGUMENT
814 def from_params(cls, args, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
816 for index, arg
in enumerate(args):
817 tokens.extend([
Token(Token.SEPARATOR, separator
if index
else indent),
818 Token(Token.ARGUMENT, arg)])
819 tokens.append(
Token(Token.EOL, eol))
834 def from_params(cls, variables, values, flavor='IN', indent=FOUR_SPACES,
835 separator=FOUR_SPACES, eol=EOL):
836 tokens = [
Token(Token.SEPARATOR, indent),
838 Token(Token.SEPARATOR, separator)]
839 for variable
in variables:
840 tokens.extend([
Token(Token.VARIABLE, variable),
841 Token(Token.SEPARATOR, separator)])
842 tokens.append(
Token(Token.FOR_SEPARATOR, flavor))
844 tokens.extend([
Token(Token.SEPARATOR, separator),
845 Token(Token.ARGUMENT, value)])
846 tokens.append(
Token(Token.EOL, eol))
853 return self.
get_valuesget_values(Token.VARIABLE)
859 return self.
get_valuesget_values(Token.ARGUMENT)
865 separator = self.
get_tokenget_token(Token.FOR_SEPARATOR)
870 self.
_add_error_add_error(
'no loop variables')
872 self.
_add_error_add_error(
"no 'IN' or other valid separator")
876 self.
_add_error_add_error(
"invalid loop variable '%s'" % var)
881 self.
errorserrors += (
'FOR loop has %s.' % error,)
904 def from_params(cls, condition, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
905 tokens = [
Token(Token.SEPARATOR, indent),
907 Token(Token.SEPARATOR, separator),
908 Token(Token.ARGUMENT, condition)]
909 if cls.
typetypetype != Token.INLINE_IF:
910 tokens.append(
Token(Token.EOL, eol))
917 values = self.
get_valuesget_values(Token.ARGUMENT)
919 return ', '.join(values)
if values
else None
923 conditions = len(self.
get_tokensget_tokens(Token.ARGUMENT))
925 self.
errorserrors += (
'%s must have a condition.' % self.
typetypetype,)
927 self.
errorserrors += (
'%s cannot have more than one condition.' % self.
typetypetype,)
932 type = Token.INLINE_IF
938 return self.
get_valuesget_values(Token.ASSIGN)
953 Token(Token.SEPARATOR, indent),
955 Token(Token.EOL, eol)
960 values = self.
get_valuesget_values(Token.ARGUMENT)
961 self.
errorserrors += (f
'ELSE does not accept arguments, got {seq2str(values)}.',)
969 Token(Token.SEPARATOR, indent),
971 Token(Token.EOL, eol)
976 self.
errorserrors += (f
'{self.type} does not accept arguments, got '
977 f
'{seq2str(self.values)}.',)
983 return self.
get_valuesget_values(Token.ARGUMENT)
996 def from_params(cls, patterns=(), type=
None, variable=
None, indent=FOUR_SPACES,
997 separator=FOUR_SPACES, eol=EOL):
998 tokens = [
Token(Token.SEPARATOR, indent),
1000 for pattern
in patterns:
1001 tokens.extend([
Token(Token.SEPARATOR, separator),
1002 Token(Token.ARGUMENT, pattern)]),
1004 tokens.extend([
Token(Token.SEPARATOR, separator),
1005 Token(Token.OPTION, f
'type={type}')])
1007 tokens.extend([
Token(Token.SEPARATOR, separator),
1009 Token(Token.SEPARATOR, separator),
1010 Token(Token.VARIABLE, variable)])
1011 tokens.append(
Token(Token.EOL, eol))
1018 return self.
get_valuesget_values(Token.ARGUMENT)
1021 pattern_type = property
1024 value = self.
get_valueget_value(Token.OPTION)
1025 return value[len(
'type='):]
if value
else None
1031 return self.
get_valueget_value(Token.VARIABLE)
1034 as_token = self.
get_tokenget_token(Token.AS)
1036 variables = self.
get_tokensget_tokens(Token.VARIABLE)
1038 self.
errorserrors += (
"EXCEPT's AS requires variable.",)
1039 elif len(variables) > 1:
1040 self.
errorserrors += (
"EXCEPT's AS accepts only one variable.",)
1042 self.
errorserrors += (f
"EXCEPT's AS variable '{variables[0].value}' is invalid.",)
1047 type = Token.FINALLY
1061 separator=FOUR_SPACES, eol=EOL):
1062 tokens = [
Token(Token.SEPARATOR, indent),
1064 Token(Token.SEPARATOR, separator),
1065 Token(Token.ARGUMENT, condition)]
1067 tokens.extend([
Token(Token.SEPARATOR, indent),
1068 Token(Token.OPTION, f
'limit={limit}')])
1069 tokens.append(
Token(Token.EOL, eol))
1073 condition = property
1076 return ', '.join(self.
get_valuesget_values(Token.ARGUMENT))
1082 value = self.
get_valueget_value(Token.OPTION)
1083 return value[len(
'limit='):]
if value
else None
1086 values = self.
get_valuesget_values(Token.ARGUMENT)
1087 if len(values) == 0:
1088 self.
errorserrors += (
'WHILE must have a condition.',)
1089 if len(values) == 2:
1090 self.
errorserrors += (f
"Second WHILE loop argument must be 'limit', "
1091 f
"got '{values[1]}'.",)
1093 self.
errorserrors += (
'WHILE cannot have more than one condition.',)
1098 type = Token.RETURN_STATEMENT
1104 return self.
get_valuesget_values(Token.ARGUMENT)
1107 def from_params(cls, values=(), indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL):
1108 tokens = [
Token(Token.SEPARATOR, indent),
1109 Token(Token.RETURN_STATEMENT)]
1110 for value
in values:
1111 tokens.extend([
Token(Token.SEPARATOR, separator),
1112 Token(Token.ARGUMENT, value)])
1113 tokens.append(
Token(Token.EOL, eol))
1117 if not context.in_keyword:
1118 self.
errorserrors += (
'RETURN can only be used inside a user keyword.', )
1119 if context.in_keyword
and context.in_finally:
1120 self.
errorserrors += (
'RETURN cannot be used in FINALLY branch.', )
1126 super(LoopControl, self).
validate(context)
1127 if not (context.in_for
or context.in_while):
1128 self.
errorserrors += (f
'{self.type} can only be used inside a loop.', )
1129 if context.in_finally:
1130 self.
errorserrors += (f
'{self.type} cannot be used in FINALLY branch.', )
1135 type = Token.CONTINUE
1145 type = Token.COMMENT
1150 Token(Token.SEPARATOR, indent),
1151 Token(Token.COMMENT, comment),
1152 Token(Token.EOL, eol)
1163 Token(Token.CONFIG, config),
1164 Token(Token.EOL, eol)
1171 value = self.
get_valueget_value(Token.CONFIG)
1172 return Language.from_name(value[len(
'language:'):])
if value
else None
1178 handles_types = (Token.ERROR, Token.FATAL_ERROR)
1193 tokens = self.
get_tokensget_tokens(Token.ERROR, Token.FATAL_ERROR)
1194 return tuple(t.error
for t
in tokens) + self.
_errors_errors_errors
@classmethod
def from_params(cls, eol=EOL):
    """Create an empty-line statement consisting of a single EOL token."""
    return cls([Token(Token.EOL, eol)])
Token representing piece of Robot Framework data.
def from_params(cls, args, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def validate(self, context)
def from_params(cls, config, eol=EOL)
def from_params(cls, value, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL, settings_section=True)
def from_params(cls, eol=EOL)
errors
Errors got from the underlying ERROR and FATAL_ERROR tokens.
def from_params(cls, name, assign=(), args=(), indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, name, eol=EOL)
def from_params(cls, name, args=(), alias=None, separator=FOUR_SPACES, eol=EOL)
def validate(self, context)
def from_params(cls, name, separator=FOUR_SPACES, eol=EOL)
def validate(self, context)
def from_params(cls, values=(), indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, args, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, name, args=(), indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_tokens(cls, tokens)
def get_tokens(self, *types)
Return tokens having any of the given types.
def __init__(self, tokens, errors=())
def get_token(self, *types)
Return a token with the given type.
def register(cls, subcls)
def from_params(cls, *args, **kwargs)
Create statement from passed parameters.
def get_values(self, *types)
Return values of tokens having any of the given types.
def get_value(self, type, default=None)
Return value of a token with the given type.
def __getitem__(self, item)
dictionary _statement_handlers
def validate(self, context)
def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL)
def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL)
def from_params(cls, name, args=(), indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, args, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, value, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, name, eol=EOL)
def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL)
def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL)
def from_params(cls, value, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, value, separator=FOUR_SPACES, eol=EOL)
def from_params(cls, value, indent=FOUR_SPACES, separator=FOUR_SPACES, eol=EOL)
def _is_valid_dict_item(self, item)
def validate(self, context)
def from_params(cls, name, value, separator=FOUR_SPACES, eol=EOL)
value can be given either as a string or as a list of strings.
def _validate_dict_items(self)
def from_params(cls, name, args=(), separator=FOUR_SPACES, eol=EOL)
def split_from_equals(string)
def normalize_whitespace(string)
def search_variable(string, identifiers='$@&% *', ignore_errors=False)
def is_dict_variable(string)
def is_scalar_assign(string, allow_assign_mark=False)