robot.parsing.lexer package¶
Submodules¶
robot.parsing.lexer.blocklexers module¶
-
class
robot.parsing.lexer.blocklexers.
BlockLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.Lexer
-
classmethod
handles
(statement, ctx)¶
-
classmethod
-
class
robot.parsing.lexer.blocklexers.
FileLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.BlockLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
SectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.BlockLexer
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_classes
()¶
-
lexer_for
(statement)¶
-
classmethod
-
class
robot.parsing.lexer.blocklexers.
SettingSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
VariableSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
TestCaseSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
TaskSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
KeywordSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SettingSectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
CommentSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
ImplicitCommentSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
ErrorSectionLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.SectionLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
TestOrKeywordLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.BlockLexer
-
name_type
= NotImplemented¶
-
classmethod
handles
(statement, ctx)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
TestCaseLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.TestOrKeywordLexer
-
name_type
= 'TESTCASE NAME'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lexer_classes
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
KeywordLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.TestOrKeywordLexer
-
name_type
= 'KEYWORD NAME'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_classes
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
NestedBlockLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.BlockLexer
-
classmethod
handles
(statement, ctx)¶
-
lex
()¶
-
lexer_classes
()¶
-
lexer_for
(statement)¶
-
classmethod
-
class
robot.parsing.lexer.blocklexers.
ForLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.NestedBlockLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
WhileLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.NestedBlockLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
IfLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.NestedBlockLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
lexer_for
(statement)¶
-
-
class
robot.parsing.lexer.blocklexers.
InlineIfLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.blocklexers.BlockLexer
-
lex
()¶
-
lexer_for
(statement)¶
-
robot.parsing.lexer.context module¶
-
class
robot.parsing.lexer.context.
LexingContext
(settings=None, lang=None)[source]¶ Bases:
object
-
settings_class
= None¶
-
-
class
robot.parsing.lexer.context.
FileContext
(settings=None, lang=None)[source]¶ Bases:
robot.parsing.lexer.context.LexingContext
-
lex_setting
(statement)¶
-
settings_class
= None¶
-
-
class
robot.parsing.lexer.context.
TestCaseFileContext
(settings=None, lang=None)[source]¶ Bases:
robot.parsing.lexer.context.FileContext
-
settings_class
¶
-
add_language
(lang)¶
-
comment_section
(statement)¶
-
keyword_context
()¶
-
keyword_section
(statement)¶
-
lex_invalid_section
(statement)¶
-
lex_setting
(statement)¶
-
setting_section
(statement)¶
-
variable_section
(statement)¶
-
-
class
robot.parsing.lexer.context.
ResourceFileContext
(settings=None, lang=None)[source]¶ Bases:
robot.parsing.lexer.context.FileContext
-
settings_class
¶
-
add_language
(lang)¶
-
comment_section
(statement)¶
-
keyword_context
()¶
-
keyword_section
(statement)¶
-
lex_invalid_section
(statement)¶
-
lex_setting
(statement)¶
-
setting_section
(statement)¶
-
task_section
(statement)¶
-
test_case_section
(statement)¶
-
variable_section
(statement)¶
-
-
class
robot.parsing.lexer.context.
InitFileContext
(settings=None, lang=None)[source]¶ Bases:
robot.parsing.lexer.context.FileContext
-
settings_class
¶
-
add_language
(lang)¶
-
comment_section
(statement)¶
-
keyword_context
()¶
-
keyword_section
(statement)¶
-
lex_invalid_section
(statement)¶
-
lex_setting
(statement)¶
-
setting_section
(statement)¶
-
task_section
(statement)¶
-
test_case_section
(statement)¶
-
variable_section
(statement)¶
-
robot.parsing.lexer.lexer module¶
-
robot.parsing.lexer.lexer.
get_tokens
(source, data_only=False, tokenize_variables=False, lang=None)[source]¶ Parses the given source to tokens.
Parameters: - source – The source where to read the data. Can be a path to
a source file as a string or as
pathlib.Path
object, an already opened file object, or Unicode text containing the data directly. Source files must be UTF-8 encoded. - data_only – When
False
(default), returns all tokens. When set to True
, omits separators, comments, continuation markers, and other non-data tokens. - tokenize_variables – When
True
, possible variables in keyword arguments and elsewhere are tokenized. See the tokenize_variables()
method for details. - lang – Additional languages to be supported during parsing.
Can be a string matching any of the supported language codes or names,
an initialized
Language
subclass, a list containing such strings or instances, or a Languages
instance.
Returns a generator that yields
Token
instances.- source – The source where to read the data. Can be a path to
a source file as a string or as
-
robot.parsing.lexer.lexer.
get_resource_tokens
(source, data_only=False, tokenize_variables=False, lang=None)[source]¶ Parses the given source to resource file tokens.
Same as
get_tokens()
otherwise, but the source is considered to be a resource file. This affects, for example, what settings are valid.
-
robot.parsing.lexer.lexer.
get_init_tokens
(source, data_only=False, tokenize_variables=False, lang=None)[source]¶ Parses the given source to init file tokens.
Same as
get_tokens()
otherwise, but the source is considered to be a suite initialization file. This affects, for example, what settings are valid.
robot.parsing.lexer.settings module¶
-
class
robot.parsing.lexer.settings.
Settings
(languages)[source]¶ Bases:
object
-
names
= ()¶
-
aliases
= {}¶
-
multi_use
= ('Metadata', 'Library', 'Resource', 'Variables')¶
-
single_value
= ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')¶
-
name_and_arguments
= ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')¶
-
name_arguments_and_with_name
= ('Library',)¶
-
-
class
robot.parsing.lexer.settings.
TestCaseFileSettings
(languages)[source]¶ Bases:
robot.parsing.lexer.settings.Settings
-
names
= ('Documentation', 'Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Test Timeout', 'Test Tags', 'Default Tags', 'Keyword Tags', 'Library', 'Resource', 'Variables')¶
-
aliases
= {'Force Tags': 'Test Tags', 'Task Setup': 'Test Setup', 'Task Tags': 'Test Tags', 'Task Teardown': 'Test Teardown', 'Task Template': 'Test Template', 'Task Timeout': 'Test Timeout'}¶
-
lex
(statement)¶
-
multi_use
= ('Metadata', 'Library', 'Resource', 'Variables')¶
-
name_and_arguments
= ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')¶
-
name_arguments_and_with_name
= ('Library',)¶
-
single_value
= ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')¶
-
-
class
robot.parsing.lexer.settings.
InitFileSettings
(languages)[source]¶ Bases:
robot.parsing.lexer.settings.Settings
-
names
= ('Documentation', 'Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Timeout', 'Test Tags', 'Keyword Tags', 'Library', 'Resource', 'Variables')¶
-
aliases
= {'Force Tags': 'Test Tags', 'Task Setup': 'Test Setup', 'Task Tags': 'Test Tags', 'Task Teardown': 'Test Teardown', 'Task Timeout': 'Test Timeout'}¶
-
lex
(statement)¶
-
multi_use
= ('Metadata', 'Library', 'Resource', 'Variables')¶
-
name_and_arguments
= ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')¶
-
name_arguments_and_with_name
= ('Library',)¶
-
single_value
= ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')¶
-
-
class
robot.parsing.lexer.settings.
ResourceFileSettings
(languages)[source]¶ Bases:
robot.parsing.lexer.settings.Settings
-
names
= ('Documentation', 'Keyword Tags', 'Library', 'Resource', 'Variables')¶
-
aliases
= {}¶
-
lex
(statement)¶
-
multi_use
= ('Metadata', 'Library', 'Resource', 'Variables')¶
-
name_and_arguments
= ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')¶
-
name_arguments_and_with_name
= ('Library',)¶
-
single_value
= ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')¶
-
-
class
robot.parsing.lexer.settings.
TestCaseSettings
(parent, languages)[source]¶ Bases:
robot.parsing.lexer.settings.Settings
-
names
= ('Documentation', 'Tags', 'Setup', 'Teardown', 'Template', 'Timeout')¶
-
template_set
¶
-
aliases
= {}¶
-
lex
(statement)¶
-
multi_use
= ('Metadata', 'Library', 'Resource', 'Variables')¶
-
name_and_arguments
= ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')¶
-
name_arguments_and_with_name
= ('Library',)¶
-
single_value
= ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')¶
-
-
class
robot.parsing.lexer.settings.
KeywordSettings
(languages)[source]¶ Bases:
robot.parsing.lexer.settings.Settings
-
names
= ('Documentation', 'Arguments', 'Teardown', 'Timeout', 'Tags', 'Return')¶
-
aliases
= {}¶
-
lex
(statement)¶
-
multi_use
= ('Metadata', 'Library', 'Resource', 'Variables')¶
-
name_and_arguments
= ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')¶
-
name_arguments_and_with_name
= ('Library',)¶
-
single_value
= ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')¶
-
robot.parsing.lexer.statementlexers module¶
-
class
robot.parsing.lexer.statementlexers.
StatementLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.Lexer
-
token_type
= None¶
-
classmethod
handles
(statement, ctx)¶
-
-
class
robot.parsing.lexer.statementlexers.
SingleType
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
TypeAndArguments
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
SectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SingleType
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
SettingSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
token_type
= 'SETTING HEADER'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
VariableSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
token_type
= 'VARIABLE HEADER'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
TestCaseSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
token_type
= 'TESTCASE HEADER'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
TaskSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
token_type
= 'TASK HEADER'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
KeywordSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
token_type
= 'KEYWORD HEADER'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
CommentSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
token_type
= 'COMMENT HEADER'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
ErrorSectionHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SectionHeaderLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
CommentLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SingleType
-
token_type
= 'COMMENT'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
ImplicitCommentLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.CommentLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
token_type
= 'COMMENT'¶
-
-
class
robot.parsing.lexer.statementlexers.
SettingLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
TestOrKeywordSettingLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.SettingLexer
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
VariableLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'VARIABLE'¶
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
KeywordCallLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
accepts_more
(statement)¶
-
classmethod
handles
(statement, ctx)¶
-
input
(statement)¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
ForHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
separators
= ('IN', 'IN RANGE', 'IN ENUMERATE', 'IN ZIP')¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
token_type
= None¶
-
-
class
robot.parsing.lexer.statementlexers.
IfHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'IF'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
InlineIfHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
token_type
= 'INLINE IF'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
-
class
robot.parsing.lexer.statementlexers.
ElseIfHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'ELSE IF'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
ElseHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'ELSE'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
TryHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'TRY'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
ExceptHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
token_type
= 'EXCEPT'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
-
class
robot.parsing.lexer.statementlexers.
FinallyHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'FINALLY'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
WhileHeaderLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.StatementLexer
-
token_type
= 'WHILE'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
-
class
robot.parsing.lexer.statementlexers.
EndLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'END'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
-
class
robot.parsing.lexer.statementlexers.
ReturnLexer
(ctx)[source]¶ Bases:
robot.parsing.lexer.statementlexers.TypeAndArguments
-
token_type
= 'RETURN STATEMENT'¶
-
accepts_more
(statement)¶
-
input
(statement)¶
-
lex
()¶
-
robot.parsing.lexer.tokenizer module¶
robot.parsing.lexer.tokens module¶
-
class
robot.parsing.lexer.tokens.
Token
(type=None, value=None, lineno=-1, col_offset=-1, error=None)[source]¶ Bases:
object
Token representing piece of Robot Framework data.
Each token has type, value, line number, column offset and end column offset in
type
,value
,lineno
,col_offset
andend_col_offset
attributes, respectively. Tokens representing error also have their error message in error
attribute.Token types are declared as class attributes such as
SETTING_HEADER
andEOL
. Values of these constants have changed slightly in Robot Framework 4.0 and they may change again in the future. It is thus safer to use the constants, not their values, when types are needed. For example, useToken(Token.EOL)
instead ofToken('EOL')
andtoken.type == Token.EOL
instead oftoken.type == 'EOL'
.If
value
is not given whenToken
is initialized andtype
isIF
,ELSE_IF
,ELSE
,FOR
,END
,WITH_NAME
orCONTINUATION
, the value is automatically set to the correct marker value like'IF'
or'ELSE IF'
. Iftype
isEOL
in this case, the value is set to'\n'
.-
SETTING_HEADER
= 'SETTING HEADER'¶
-
VARIABLE_HEADER
= 'VARIABLE HEADER'¶
-
TESTCASE_HEADER
= 'TESTCASE HEADER'¶
-
TASK_HEADER
= 'TASK HEADER'¶
-
KEYWORD_HEADER
= 'KEYWORD HEADER'¶
-
COMMENT_HEADER
= 'COMMENT HEADER'¶
-
TESTCASE_NAME
= 'TESTCASE NAME'¶
-
KEYWORD_NAME
= 'KEYWORD NAME'¶
-
DOCUMENTATION
= 'DOCUMENTATION'¶
-
SUITE_SETUP
= 'SUITE SETUP'¶
-
SUITE_TEARDOWN
= 'SUITE TEARDOWN'¶
-
METADATA
= 'METADATA'¶
-
TEST_SETUP
= 'TEST SETUP'¶
-
TEST_TEARDOWN
= 'TEST TEARDOWN'¶
-
TEST_TEMPLATE
= 'TEST TEMPLATE'¶
-
TEST_TIMEOUT
= 'TEST TIMEOUT'¶
-
FORCE_TAGS
= 'FORCE TAGS'¶
-
DEFAULT_TAGS
= 'DEFAULT TAGS'¶
-
KEYWORD_TAGS
= 'KEYWORD TAGS'¶
-
LIBRARY
= 'LIBRARY'¶
-
RESOURCE
= 'RESOURCE'¶
-
VARIABLES
= 'VARIABLES'¶
-
SETUP
= 'SETUP'¶
-
TEARDOWN
= 'TEARDOWN'¶
-
TEMPLATE
= 'TEMPLATE'¶
-
TIMEOUT
= 'TIMEOUT'¶
-
TAGS
= 'TAGS'¶
-
ARGUMENTS
= 'ARGUMENTS'¶
-
RETURN
= 'RETURN'¶
-
RETURN_SETTING
= 'RETURN'¶
-
NAME
= 'NAME'¶
-
VARIABLE
= 'VARIABLE'¶
-
ARGUMENT
= 'ARGUMENT'¶
-
ASSIGN
= 'ASSIGN'¶
-
KEYWORD
= 'KEYWORD'¶
-
WITH_NAME
= 'WITH NAME'¶
-
FOR
= 'FOR'¶
-
FOR_SEPARATOR
= 'FOR SEPARATOR'¶
-
END
= 'END'¶
-
IF
= 'IF'¶
-
INLINE_IF
= 'INLINE IF'¶
-
ELSE_IF
= 'ELSE IF'¶
-
ELSE
= 'ELSE'¶
-
TRY
= 'TRY'¶
-
EXCEPT
= 'EXCEPT'¶
-
FINALLY
= 'FINALLY'¶
-
AS
= 'AS'¶
-
WHILE
= 'WHILE'¶
-
RETURN_STATEMENT
= 'RETURN STATEMENT'¶
-
CONTINUE
= 'CONTINUE'¶
-
BREAK
= 'BREAK'¶
-
OPTION
= 'OPTION'¶
-
SEPARATOR
= 'SEPARATOR'¶
-
COMMENT
= 'COMMENT'¶
-
CONTINUATION
= 'CONTINUATION'¶
-
CONFIG
= 'CONFIG'¶
-
EOL
= 'EOL'¶
-
EOS
= 'EOS'¶
-
ERROR
= 'ERROR'¶
-
FATAL_ERROR
= 'FATAL ERROR'¶
-
NON_DATA_TOKENS
= frozenset({'EOL', 'CONTINUATION', 'EOS', 'COMMENT', 'SEPARATOR'})¶
-
SETTING_TOKENS
= frozenset({'RESOURCE', 'KEYWORD TAGS', 'DOCUMENTATION', 'DEFAULT TAGS', 'RETURN', 'VARIABLES', 'ARGUMENTS', 'TEARDOWN', 'SETUP', 'TEST TEARDOWN', 'TEST TEMPLATE', 'TEST SETUP', 'TAGS', 'TEST TIMEOUT', 'SUITE SETUP', 'METADATA', 'TEMPLATE', 'FORCE TAGS', 'TIMEOUT', 'SUITE TEARDOWN', 'LIBRARY'})¶
-
HEADER_TOKENS
= frozenset({'SETTING HEADER', 'TESTCASE HEADER', 'COMMENT HEADER', 'KEYWORD HEADER', 'TASK HEADER', 'VARIABLE HEADER'})¶
-
ALLOW_VARIABLES
= frozenset({'TESTCASE NAME', 'ARGUMENT', 'KEYWORD NAME', 'NAME'})¶
-
type
¶
-
value
¶
-
lineno
¶
-
col_offset
¶
-
error
¶
-
end_col_offset
¶
-
tokenize_variables
()[source]¶ Tokenizes possible variables in token value.
Yields the token itself if the token does not allow variables (see
Token.ALLOW_VARIABLES
) or its value does not contain variables. Otherwise yields variable tokens as well as tokens before, after, or between variables so that they have the same type as the original token.
-
-
class
robot.parsing.lexer.tokens.
EOS
(lineno=-1, col_offset=-1)[source]¶ Bases:
robot.parsing.lexer.tokens.Token
Token representing end of a statement.
-
ALLOW_VARIABLES
= frozenset({'TESTCASE NAME', 'ARGUMENT', 'KEYWORD NAME', 'NAME'})¶
-
ARGUMENT
= 'ARGUMENT'¶
-
ARGUMENTS
= 'ARGUMENTS'¶
-
AS
= 'AS'¶
-
ASSIGN
= 'ASSIGN'¶
-
BREAK
= 'BREAK'¶
-
COMMENT
= 'COMMENT'¶
-
COMMENT_HEADER
= 'COMMENT HEADER'¶
-
CONFIG
= 'CONFIG'¶
-
CONTINUATION
= 'CONTINUATION'¶
-
CONTINUE
= 'CONTINUE'¶
-
DEFAULT_TAGS
= 'DEFAULT TAGS'¶
-
DOCUMENTATION
= 'DOCUMENTATION'¶
-
ELSE
= 'ELSE'¶
-
ELSE_IF
= 'ELSE IF'¶
-
END
= 'END'¶
-
EOL
= 'EOL'¶
-
EOS
= 'EOS'¶
-
ERROR
= 'ERROR'¶
-
EXCEPT
= 'EXCEPT'¶
-
FATAL_ERROR
= 'FATAL ERROR'¶
-
FINALLY
= 'FINALLY'¶
-
FOR
= 'FOR'¶
-
FORCE_TAGS
= 'FORCE TAGS'¶
-
FOR_SEPARATOR
= 'FOR SEPARATOR'¶
-
HEADER_TOKENS
= frozenset({'SETTING HEADER', 'TESTCASE HEADER', 'COMMENT HEADER', 'KEYWORD HEADER', 'TASK HEADER', 'VARIABLE HEADER'})¶
-
IF
= 'IF'¶
-
INLINE_IF
= 'INLINE IF'¶
-
KEYWORD
= 'KEYWORD'¶
-
KEYWORD_HEADER
= 'KEYWORD HEADER'¶
-
KEYWORD_NAME
= 'KEYWORD NAME'¶
-
KEYWORD_TAGS
= 'KEYWORD TAGS'¶
-
LIBRARY
= 'LIBRARY'¶
-
METADATA
= 'METADATA'¶
-
NAME
= 'NAME'¶
-
NON_DATA_TOKENS
= frozenset({'EOL', 'CONTINUATION', 'EOS', 'COMMENT', 'SEPARATOR'})¶
-
OPTION
= 'OPTION'¶
-
RESOURCE
= 'RESOURCE'¶
-
RETURN
= 'RETURN'¶
-
RETURN_SETTING
= 'RETURN'¶
-
RETURN_STATEMENT
= 'RETURN STATEMENT'¶
-
SEPARATOR
= 'SEPARATOR'¶
-
SETTING_HEADER
= 'SETTING HEADER'¶
-
SETTING_TOKENS
= frozenset({'RESOURCE', 'KEYWORD TAGS', 'DOCUMENTATION', 'DEFAULT TAGS', 'RETURN', 'VARIABLES', 'ARGUMENTS', 'TEARDOWN', 'SETUP', 'TEST TEARDOWN', 'TEST TEMPLATE', 'TEST SETUP', 'TAGS', 'TEST TIMEOUT', 'SUITE SETUP', 'METADATA', 'TEMPLATE', 'FORCE TAGS', 'TIMEOUT', 'SUITE TEARDOWN', 'LIBRARY'})¶
-
SETUP
= 'SETUP'¶
-
SUITE_SETUP
= 'SUITE SETUP'¶
-
SUITE_TEARDOWN
= 'SUITE TEARDOWN'¶
-
TAGS
= 'TAGS'¶
-
TASK_HEADER
= 'TASK HEADER'¶
-
TEARDOWN
= 'TEARDOWN'¶
-
TEMPLATE
= 'TEMPLATE'¶
-
TESTCASE_HEADER
= 'TESTCASE HEADER'¶
-
TESTCASE_NAME
= 'TESTCASE NAME'¶
-
TEST_SETUP
= 'TEST SETUP'¶
-
TEST_TEARDOWN
= 'TEST TEARDOWN'¶
-
TEST_TEMPLATE
= 'TEST TEMPLATE'¶
-
TEST_TIMEOUT
= 'TEST TIMEOUT'¶
-
TIMEOUT
= 'TIMEOUT'¶
-
TRY
= 'TRY'¶
-
VARIABLE
= 'VARIABLE'¶
-
VARIABLES
= 'VARIABLES'¶
-
VARIABLE_HEADER
= 'VARIABLE HEADER'¶
-
WHILE
= 'WHILE'¶
-
WITH_NAME
= 'WITH NAME'¶
-
col_offset
¶
-
end_col_offset
¶
-
error
¶
-
lineno
¶
-
set_error
(error, fatal=False)¶
-
tokenize_variables
()¶ Tokenizes possible variables in token value.
Yields the token itself if the token does not allow variables (see
Token.ALLOW_VARIABLES
) or its value does not contain variables. Otherwise yields variable tokens as well as tokens before, after, or between variables so that they have the same type as the original token.
-
type
¶
-
value
¶
-
-
class
robot.parsing.lexer.tokens.
END
(lineno=-1, col_offset=-1, virtual=False)[source]¶ Bases:
robot.parsing.lexer.tokens.Token
Token representing END token used to signify block ending.
Virtual END tokens have '' as their value; with "real" END tokens the value is 'END'.
-
ALLOW_VARIABLES
= frozenset({'TESTCASE NAME', 'ARGUMENT', 'KEYWORD NAME', 'NAME'})¶
-
ARGUMENT
= 'ARGUMENT'¶
-
ARGUMENTS
= 'ARGUMENTS'¶
-
AS
= 'AS'¶
-
ASSIGN
= 'ASSIGN'¶
-
BREAK
= 'BREAK'¶
-
COMMENT
= 'COMMENT'¶
-
COMMENT_HEADER
= 'COMMENT HEADER'¶
-
CONFIG
= 'CONFIG'¶
-
CONTINUATION
= 'CONTINUATION'¶
-
CONTINUE
= 'CONTINUE'¶
-
DEFAULT_TAGS
= 'DEFAULT TAGS'¶
-
DOCUMENTATION
= 'DOCUMENTATION'¶
-
ELSE
= 'ELSE'¶
-
ELSE_IF
= 'ELSE IF'¶
-
END
= 'END'¶
-
EOL
= 'EOL'¶
-
EOS
= 'EOS'¶
-
ERROR
= 'ERROR'¶
-
EXCEPT
= 'EXCEPT'¶
-
FATAL_ERROR
= 'FATAL ERROR'¶
-
FINALLY
= 'FINALLY'¶
-
FOR
= 'FOR'¶
-
FORCE_TAGS
= 'FORCE TAGS'¶
-
FOR_SEPARATOR
= 'FOR SEPARATOR'¶
-
HEADER_TOKENS
= frozenset({'SETTING HEADER', 'TESTCASE HEADER', 'COMMENT HEADER', 'KEYWORD HEADER', 'TASK HEADER', 'VARIABLE HEADER'})¶
-
IF
= 'IF'¶
-
INLINE_IF
= 'INLINE IF'¶
-
KEYWORD
= 'KEYWORD'¶
-
KEYWORD_HEADER
= 'KEYWORD HEADER'¶
-
KEYWORD_NAME
= 'KEYWORD NAME'¶
-
KEYWORD_TAGS
= 'KEYWORD TAGS'¶
-
LIBRARY
= 'LIBRARY'¶
-
METADATA
= 'METADATA'¶
-
NAME
= 'NAME'¶
-
NON_DATA_TOKENS
= frozenset({'EOL', 'CONTINUATION', 'EOS', 'COMMENT', 'SEPARATOR'})¶
-
OPTION
= 'OPTION'¶
-
RESOURCE
= 'RESOURCE'¶
-
RETURN
= 'RETURN'¶
-
RETURN_SETTING
= 'RETURN'¶
-
RETURN_STATEMENT
= 'RETURN STATEMENT'¶
-
SEPARATOR
= 'SEPARATOR'¶
-
SETTING_HEADER
= 'SETTING HEADER'¶
-
SETTING_TOKENS
= frozenset({'RESOURCE', 'KEYWORD TAGS', 'DOCUMENTATION', 'DEFAULT TAGS', 'RETURN', 'VARIABLES', 'ARGUMENTS', 'TEARDOWN', 'SETUP', 'TEST TEARDOWN', 'TEST TEMPLATE', 'TEST SETUP', 'TAGS', 'TEST TIMEOUT', 'SUITE SETUP', 'METADATA', 'TEMPLATE', 'FORCE TAGS', 'TIMEOUT', 'SUITE TEARDOWN', 'LIBRARY'})¶
-
SETUP
= 'SETUP'¶
-
SUITE_SETUP
= 'SUITE SETUP'¶
-
SUITE_TEARDOWN
= 'SUITE TEARDOWN'¶
-
TAGS
= 'TAGS'¶
-
TASK_HEADER
= 'TASK HEADER'¶
-
TEARDOWN
= 'TEARDOWN'¶
-
TEMPLATE
= 'TEMPLATE'¶
-
TESTCASE_HEADER
= 'TESTCASE HEADER'¶
-
TESTCASE_NAME
= 'TESTCASE NAME'¶
-
TEST_SETUP
= 'TEST SETUP'¶
-
TEST_TEARDOWN
= 'TEST TEARDOWN'¶
-
TEST_TEMPLATE
= 'TEST TEMPLATE'¶
-
TEST_TIMEOUT
= 'TEST TIMEOUT'¶
-
TIMEOUT
= 'TIMEOUT'¶
-
TRY
= 'TRY'¶
-
VARIABLE
= 'VARIABLE'¶
-
VARIABLES
= 'VARIABLES'¶
-
VARIABLE_HEADER
= 'VARIABLE HEADER'¶
-
WHILE
= 'WHILE'¶
-
WITH_NAME
= 'WITH NAME'¶
-
col_offset
¶
-
end_col_offset
¶
-
error
¶
-
lineno
¶
-
set_error
(error, fatal=False)¶
-
tokenize_variables
()¶ Tokenizes possible variables in token value.
Yields the token itself if the token does not allow variables (see
Token.ALLOW_VARIABLES
) or its value does not contain variables. Otherwise yields variable tokens as well as tokens before, after, or between variables so that they have the same type as the original token.
-
type
¶
-
value
¶
-