robot.parsing.lexer package

Submodules

robot.parsing.lexer.blocklexers module

class robot.parsing.lexer.blocklexers.BlockLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.Lexer

accepts_more(statement)[source]
input(statement)[source]
lexer_for(statement)[source]
lexer_classes()[source]
lex()[source]
handles(statement)
class robot.parsing.lexer.blocklexers.FileLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.BlockLexer

lex()[source]
lexer_classes()[source]
accepts_more(statement)
handles(statement)
input(statement)
lexer_for(statement)
class robot.parsing.lexer.blocklexers.SectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.BlockLexer

accepts_more(statement)[source]
handles(statement)
input(statement)
lex()
lexer_classes()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.SettingSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.VariableSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.TestCaseSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.KeywordSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SettingSectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.CommentSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.ImplicitCommentSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.ErrorSectionLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.SectionLexer

handles(statement)[source]
lexer_classes()[source]
accepts_more(statement)
input(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.TestOrKeywordLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.BlockLexer

name_type = NotImplemented
accepts_more(statement)[source]
input(statement)[source]
lexer_classes()[source]
handles(statement)
lex()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.TestCaseLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.TestOrKeywordLexer

name_type = 'TESTCASE_NAME'
lex()[source]
accepts_more(statement)
handles(statement)
input(statement)
lexer_classes()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.KeywordLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.TestOrKeywordLexer

name_type = 'KEYWORD_NAME'
accepts_more(statement)
handles(statement)
input(statement)
lex()
lexer_classes()
lexer_for(statement)
class robot.parsing.lexer.blocklexers.ForLoopLexer(ctx)[source]

Bases: robot.parsing.lexer.blocklexers.BlockLexer

handles(statement)[source]
accepts_more(statement)[source]
input(statement)[source]
lexer_classes()[source]
lex()
lexer_for(statement)

robot.parsing.lexer.context module

class robot.parsing.lexer.context.LexingContext(settings=None)[source]

Bases: object

settings_class = None
lex_setting(statement)[source]
class robot.parsing.lexer.context.FileContext(settings=None)[source]

Bases: robot.parsing.lexer.context.LexingContext

sections_class = None
setting_section(statement)[source]
variable_section(statement)[source]
test_case_section(statement)[source]
keyword_section(statement)[source]
comment_section(statement)[source]
keyword_context()[source]
lex_invalid_section(statement)[source]
lex_setting(statement)
settings_class = None
class robot.parsing.lexer.context.TestCaseFileContext(settings=None)[source]

Bases: robot.parsing.lexer.context.FileContext

sections_class

alias of robot.parsing.lexer.sections.TestCaseFileSections

settings_class

alias of robot.parsing.lexer.settings.TestCaseFileSettings

test_case_context()[source]
comment_section(statement)
keyword_context()
keyword_section(statement)
lex_invalid_section(statement)
lex_setting(statement)
setting_section(statement)
test_case_section(statement)
variable_section(statement)
class robot.parsing.lexer.context.ResourceFileContext(settings=None)[source]

Bases: robot.parsing.lexer.context.FileContext

sections_class

alias of robot.parsing.lexer.sections.ResourceFileSections

settings_class

alias of robot.parsing.lexer.settings.ResourceFileSettings

comment_section(statement)
keyword_context()
keyword_section(statement)
lex_invalid_section(statement)
lex_setting(statement)
setting_section(statement)
test_case_section(statement)
variable_section(statement)
class robot.parsing.lexer.context.InitFileContext(settings=None)[source]

Bases: robot.parsing.lexer.context.FileContext

sections_class

alias of robot.parsing.lexer.sections.InitFileSections

settings_class

alias of robot.parsing.lexer.settings.InitFileSettings

comment_section(statement)
keyword_context()
keyword_section(statement)
lex_invalid_section(statement)
lex_setting(statement)
setting_section(statement)
test_case_section(statement)
variable_section(statement)
class robot.parsing.lexer.context.TestCaseContext(settings=None)[source]

Bases: robot.parsing.lexer.context.LexingContext

template_set
lex_setting(statement)
settings_class = None
class robot.parsing.lexer.context.KeywordContext(settings=None)[source]

Bases: robot.parsing.lexer.context.LexingContext

template_set
lex_setting(statement)
settings_class = None

robot.parsing.lexer.lexer module

robot.parsing.lexer.lexer.get_tokens(source, data_only=False, tokenize_variables=False)[source]

Parses the given source to tokens.

Parameters:
  • source – The source to read the data from. Can be a path to a source file, given as a string or as a pathlib.Path object, an already opened file object, or Unicode text containing the data directly. Source files must be UTF-8 encoded.
  • data_only – When False (default), returns all tokens. When set to True, omits separators, comments, continuation markers, and other non-data tokens.
  • tokenize_variables – When True, possible variables in keyword arguments and elsewhere are tokenized. See the tokenize_variables() method for details.

Returns a generator that yields Token instances.
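
A minimal usage sketch. The Robot Framework data is illustrative; the function and the token attributes used here are the ones documented in this module and in robot.parsing.lexer.tokens below:

    from robot.parsing.lexer.lexer import get_tokens

    data = ('*** Test Cases ***\n'
            'Example\n'
            '    Log    Hello!\n')

    # With data_only=True, separators, comments and other non-data
    # tokens are omitted from the output.
    for token in get_tokens(data, data_only=True):
        print(token.type, repr(token.value), token.lineno, token.col_offset)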

robot.parsing.lexer.lexer.get_resource_tokens(source, data_only=False, tokenize_variables=False)[source]

Parses the given source to resource file tokens.

Otherwise same as get_tokens() but the source is considered to be a resource file. This affects, for example, what settings are valid.

robot.parsing.lexer.lexer.get_init_tokens(source, data_only=False, tokenize_variables=False)[source]

Parses the given source to init file tokens.

Otherwise same as get_tokens() but the source is considered to be a suite initialization file. This affects, for example, what settings are valid.
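
The difference is easiest to see with a setting that is valid only in some file types. A small sketch, assuming invalid settings are marked with ERROR tokens (see Token.set_error() below): Metadata is a valid setting in test case files but not in resource files.

    from robot.parsing.lexer.lexer import get_tokens, get_resource_tokens
    from robot.parsing.lexer.tokens import Token

    data = '*** Settings ***\nMetadata    Name    Value\n'

    def has_error(tokens):
        return any(token.type == Token.ERROR for token in tokens)

    print(has_error(get_tokens(data, data_only=True)))           # False: valid in test case files
    print(has_error(get_resource_tokens(data, data_only=True)))  # True: not allowed in resource files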

class robot.parsing.lexer.lexer.Lexer(ctx, data_only=False, tokenize_variables=False)[source]

Bases: object

input(source)[source]
get_tokens()[source]
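
A sketch of using this class directly, assuming the same flow the module-level functions above use internally: construct a context for the file type, feed the source with input(), and iterate get_tokens().

    from robot.parsing.lexer.context import TestCaseFileContext
    from robot.parsing.lexer.lexer import Lexer

    lexer = Lexer(TestCaseFileContext(), data_only=True)
    lexer.input('*** Test Cases ***\nExample\n    No Operation\n')
    for token in lexer.get_tokens():
        print(token.type, token.value)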

robot.parsing.lexer.sections module

class robot.parsing.lexer.sections.Sections[source]

Bases: object

setting_markers = ('Settings', 'Setting')
variable_markers = ('Variables', 'Variable')
test_case_markers = ('Test Cases', 'Test Case', 'Tasks', 'Task')
keyword_markers = ('Keywords', 'Keyword')
comment_markers = ('Comments', 'Comment')
setting(statement)[source]
variable(statement)[source]
test_case(statement)[source]
keyword(statement)[source]
comment(statement)[source]
lex_invalid(statement)[source]
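
The marker tuples above show that both plural and singular section header names are recognized. A small sketch demonstrating this through the public get_tokens() entry point (the header strings are illustrative):

    from robot.parsing.lexer.lexer import get_tokens

    for data in ('*** Settings ***\n', '*** Setting ***\n'):
        header = next(get_tokens(data, data_only=True))
        print(header.type)  # SETTING_HEADER for both spellings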
class robot.parsing.lexer.sections.TestCaseFileSections[source]

Bases: robot.parsing.lexer.sections.Sections

test_case(statement)[source]
comment(statement)
comment_markers = ('Comments', 'Comment')
keyword(statement)
keyword_markers = ('Keywords', 'Keyword')
lex_invalid(statement)
setting(statement)
setting_markers = ('Settings', 'Setting')
test_case_markers = ('Test Cases', 'Test Case', 'Tasks', 'Task')
variable(statement)
variable_markers = ('Variables', 'Variable')
class robot.parsing.lexer.sections.ResourceFileSections[source]

Bases: robot.parsing.lexer.sections.Sections

comment(statement)
comment_markers = ('Comments', 'Comment')
keyword(statement)
keyword_markers = ('Keywords', 'Keyword')
lex_invalid(statement)
setting(statement)
setting_markers = ('Settings', 'Setting')
test_case(statement)
test_case_markers = ('Test Cases', 'Test Case', 'Tasks', 'Task')
variable(statement)
variable_markers = ('Variables', 'Variable')
class robot.parsing.lexer.sections.InitFileSections[source]

Bases: robot.parsing.lexer.sections.Sections

comment(statement)
comment_markers = ('Comments', 'Comment')
keyword(statement)
keyword_markers = ('Keywords', 'Keyword')
lex_invalid(statement)
setting(statement)
setting_markers = ('Settings', 'Setting')
test_case(statement)
test_case_markers = ('Test Cases', 'Test Case', 'Tasks', 'Task')
variable(statement)
variable_markers = ('Variables', 'Variable')

robot.parsing.lexer.settings module

class robot.parsing.lexer.settings.Settings[source]

Bases: object

names = ()
aliases = {}
multi_use = ('Metadata', 'Library', 'Resource', 'Variables')
single_value = ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')
name_and_arguments = ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')
name_arguments_and_with_name = ('Library',)
lex(statement)[source]
class robot.parsing.lexer.settings.TestCaseFileSettings[source]

Bases: robot.parsing.lexer.settings.Settings

names = ('Documentation', 'Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Test Timeout', 'Force Tags', 'Default Tags', 'Library', 'Resource', 'Variables')
aliases = {'Task Setup': 'Test Setup', 'Task Teardown': 'Test Teardown', 'Task Template': 'Test Template', 'Task Timeout': 'Test Timeout'}
lex(statement)
multi_use = ('Metadata', 'Library', 'Resource', 'Variables')
name_and_arguments = ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')
name_arguments_and_with_name = ('Library',)
single_value = ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')
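
The aliases above map task-oriented setting names to their test counterparts. A sketch, assuming an aliased name is lexed to the same token type as the setting it maps to:

    from robot.parsing.lexer.lexer import get_tokens
    from robot.parsing.lexer.tokens import Token

    data = '*** Settings ***\nTask Timeout    1 minute\n'
    types = [token.type for token in get_tokens(data, data_only=True)]
    print(Token.TEST_TIMEOUT in types)  # True: 'Task Timeout' is an alias of 'Test Timeout'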
class robot.parsing.lexer.settings.InitFileSettings[source]

Bases: robot.parsing.lexer.settings.Settings

names = ('Documentation', 'Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Timeout', 'Force Tags', 'Library', 'Resource', 'Variables')
aliases = {}
lex(statement)
multi_use = ('Metadata', 'Library', 'Resource', 'Variables')
name_and_arguments = ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')
name_arguments_and_with_name = ('Library',)
single_value = ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')
class robot.parsing.lexer.settings.ResourceFileSettings[source]

Bases: robot.parsing.lexer.settings.Settings

names = ('Documentation', 'Library', 'Resource', 'Variables')
aliases = {}
lex(statement)
multi_use = ('Metadata', 'Library', 'Resource', 'Variables')
name_and_arguments = ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')
name_arguments_and_with_name = ('Library',)
single_value = ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')
class robot.parsing.lexer.settings.TestCaseSettings(parent)[source]

Bases: robot.parsing.lexer.settings.Settings

names = ('Documentation', 'Tags', 'Setup', 'Teardown', 'Template', 'Timeout')
template_set
aliases = {}
lex(statement)
multi_use = ('Metadata', 'Library', 'Resource', 'Variables')
name_and_arguments = ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')
name_arguments_and_with_name = ('Library',)
single_value = ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')
class robot.parsing.lexer.settings.KeywordSettings[source]

Bases: robot.parsing.lexer.settings.Settings

names = ('Documentation', 'Arguments', 'Teardown', 'Timeout', 'Tags', 'Return')
aliases = {}
lex(statement)
multi_use = ('Metadata', 'Library', 'Resource', 'Variables')
name_and_arguments = ('Metadata', 'Suite Setup', 'Suite Teardown', 'Test Setup', 'Test Teardown', 'Test Template', 'Setup', 'Teardown', 'Template', 'Resource', 'Variables')
name_arguments_and_with_name = ('Library',)
single_value = ('Resource', 'Test Timeout', 'Test Template', 'Timeout', 'Template')

robot.parsing.lexer.statementlexers module

class robot.parsing.lexer.statementlexers.Lexer(ctx)[source]

Bases: object

Base class for lexers.

handles(statement)[source]
accepts_more(statement)[source]
input(statement)[source]
lex()[source]
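
These four methods form the interface the block and statement lexers in this package implement: handles() tells whether the lexer accepts a given statement, accepts_more() whether it also accepts subsequent statements, input() feeds a statement in, and lex() assigns token types. A hypothetical minimal subclass illustrating the contract (EchoCommentLexer is illustrative, not part of the package):

    from robot.parsing.lexer.statementlexers import Lexer
    from robot.parsing.lexer.tokens import Token

    class EchoCommentLexer(Lexer):

        def handles(self, statement):
            return True            # accept any statement offered

        def accepts_more(self, statement):
            return False           # lex one statement at a time

        def input(self, statement):
            self.statement = statement

        def lex(self):
            for token in self.statement:
                token.type = Token.COMMENT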
class robot.parsing.lexer.statementlexers.StatementLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.Lexer

token_type = None
accepts_more(statement)[source]
input(statement)[source]
lex()[source]
handles(statement)
class robot.parsing.lexer.statementlexers.SectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

handles(statement)[source]
accepts_more(statement)
input(statement)
lex()
token_type = None
class robot.parsing.lexer.statementlexers.SettingSectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SectionHeaderLexer

token_type = 'SETTING_HEADER'
accepts_more(statement)
handles(statement)
input(statement)
lex()
class robot.parsing.lexer.statementlexers.VariableSectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SectionHeaderLexer

token_type = 'VARIABLE_HEADER'
accepts_more(statement)
handles(statement)
input(statement)
lex()
class robot.parsing.lexer.statementlexers.TestCaseSectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SectionHeaderLexer

token_type = 'TESTCASE_HEADER'
accepts_more(statement)
handles(statement)
input(statement)
lex()
class robot.parsing.lexer.statementlexers.KeywordSectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SectionHeaderLexer

token_type = 'KEYWORD_HEADER'
accepts_more(statement)
handles(statement)
input(statement)
lex()
class robot.parsing.lexer.statementlexers.CommentSectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SectionHeaderLexer

token_type = 'COMMENT_HEADER'
accepts_more(statement)
handles(statement)
input(statement)
lex()
class robot.parsing.lexer.statementlexers.ErrorSectionHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SectionHeaderLexer

lex()[source]
accepts_more(statement)
handles(statement)
input(statement)
token_type = None
class robot.parsing.lexer.statementlexers.CommentLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

token_type = 'COMMENT'
accepts_more(statement)
handles(statement)
input(statement)
lex()
class robot.parsing.lexer.statementlexers.SettingLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

lex()[source]
accepts_more(statement)
handles(statement)
input(statement)
token_type = None
class robot.parsing.lexer.statementlexers.TestOrKeywordSettingLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.SettingLexer

handles(statement)[source]
accepts_more(statement)
input(statement)
lex()
token_type = None
class robot.parsing.lexer.statementlexers.VariableLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

lex()[source]
accepts_more(statement)
handles(statement)
input(statement)
token_type = None
class robot.parsing.lexer.statementlexers.KeywordCallLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

lex()[source]
accepts_more(statement)
handles(statement)
input(statement)
token_type = None
class robot.parsing.lexer.statementlexers.ForLoopHeaderLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

separators = ('IN', 'IN RANGE', 'IN ENUMERATE', 'IN ZIP')
handles(statement)[source]
lex()[source]
accepts_more(statement)
input(statement)
token_type = None
class robot.parsing.lexer.statementlexers.EndLexer(ctx)[source]

Bases: robot.parsing.lexer.statementlexers.StatementLexer

handles(statement)[source]
lex()[source]
accepts_more(statement)
input(statement)
token_type = None

robot.parsing.lexer.tokenizer module

class robot.parsing.lexer.tokenizer.Tokenizer[source]

Bases: object

tokenize(data, data_only=False)[source]
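
A sketch, assuming tokenize() is a generator yielding one statement at a time as a list of Token instances (final token types are assigned later by the lexers above):

    from robot.parsing.lexer.tokenizer import Tokenizer

    data = '*** Settings ***\nLibrary    Collections\n'
    for statement in Tokenizer().tokenize(data, data_only=True):
        print([token.value for token in statement])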

robot.parsing.lexer.tokens module

class robot.parsing.lexer.tokens.Token(type=None, value='', lineno=-1, col_offset=-1, error=None)[source]

Bases: object

Token representing a piece of Robot Framework data.

Each token has a type, a value, a line number, a column offset, and an end column offset, available via the type, value, lineno, col_offset and end_col_offset attributes, respectively. Tokens representing an error also carry their error message in the error attribute.

Token types are declared as class attributes.

SETTING_HEADER = 'SETTING_HEADER'
VARIABLE_HEADER = 'VARIABLE_HEADER'
TESTCASE_HEADER = 'TESTCASE_HEADER'
KEYWORD_HEADER = 'KEYWORD_HEADER'
COMMENT_HEADER = 'COMMENT_HEADER'
TESTCASE_NAME = 'TESTCASE_NAME'
KEYWORD_NAME = 'KEYWORD_NAME'
DOCUMENTATION = 'DOCUMENTATION'
SUITE_SETUP = 'SUITE_SETUP'
SUITE_TEARDOWN = 'SUITE_TEARDOWN'
METADATA = 'METADATA'
TEST_SETUP = 'TEST_SETUP'
TEST_TEARDOWN = 'TEST_TEARDOWN'
TEST_TEMPLATE = 'TEST_TEMPLATE'
TEST_TIMEOUT = 'TEST_TIMEOUT'
FORCE_TAGS = 'FORCE_TAGS'
DEFAULT_TAGS = 'DEFAULT_TAGS'
LIBRARY = 'LIBRARY'
RESOURCE = 'RESOURCE'
VARIABLES = 'VARIABLES'
SETUP = 'SETUP'
TEARDOWN = 'TEARDOWN'
TEMPLATE = 'TEMPLATE'
TIMEOUT = 'TIMEOUT'
TAGS = 'TAGS'
ARGUMENTS = 'ARGUMENTS'
RETURN = 'RETURN'
NAME = 'NAME'
VARIABLE = 'VARIABLE'
ARGUMENT = 'ARGUMENT'
ASSIGN = 'ASSIGN'
KEYWORD = 'KEYWORD'
WITH_NAME = 'WITH_NAME'
FOR = 'FOR'
FOR_SEPARATOR = 'FOR_SEPARATOR'
OLD_FOR_INDENT = 'OLD_FOR_INDENT'
END = 'END'
SEPARATOR = 'SEPARATOR'
COMMENT = 'COMMENT'
CONTINUATION = 'CONTINUATION'
EOL = 'EOL'
EOS = 'EOS'
ERROR = 'ERROR'
FATAL_ERROR = 'FATAL_ERROR'
NON_DATA_TOKENS = ('SEPARATOR', 'COMMENT', 'CONTINUATION', 'EOL', 'EOS')
SETTING_TOKENS = ('DOCUMENTATION', 'SUITE_SETUP', 'SUITE_TEARDOWN', 'METADATA', 'TEST_SETUP', 'TEST_TEARDOWN', 'TEST_TEMPLATE', 'TEST_TIMEOUT', 'FORCE_TAGS', 'DEFAULT_TAGS', 'LIBRARY', 'RESOURCE', 'VARIABLES', 'SETUP', 'TEARDOWN', 'TEMPLATE', 'TIMEOUT', 'TAGS', 'ARGUMENTS', 'RETURN')
HEADER_TOKENS = ('SETTING_HEADER', 'VARIABLE_HEADER', 'TESTCASE_HEADER', 'KEYWORD_HEADER', 'COMMENT_HEADER')
ALLOW_VARIABLES = ('NAME', 'ARGUMENT', 'TESTCASE_NAME', 'KEYWORD_NAME')
type
value
lineno
col_offset
error
end_col_offset
set_error(error, fatal=False)[source]
tokenize_variables()[source]

Tokenizes possible variables in the token value.

Yields the token itself if the token does not allow variables (see Token.ALLOW_VARIABLES) or its value does not contain variables. Otherwise yields variable tokens as well as the tokens before, after, and between variables; those surrounding tokens have the same type as the original token.
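
A sketch of the splitting behavior with an ARGUMENT token, one of the Token.ALLOW_VARIABLES types (the value is illustrative):

    from robot.parsing.lexer.tokens import Token

    token = Token(Token.ARGUMENT, 'Hello, ${name}!', lineno=3, col_offset=10)
    for part in token.tokenize_variables():
        print(part.type, repr(part.value))
    # Expected parts: ARGUMENT 'Hello, ', VARIABLE '${name}', ARGUMENT '!'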

class robot.parsing.lexer.tokens.EOS(lineno=-1, col_offset=-1)[source]

Bases: robot.parsing.lexer.tokens.Token

Token representing the end of a statement.

classmethod from_token(token)[source]
ALLOW_VARIABLES = ('NAME', 'ARGUMENT', 'TESTCASE_NAME', 'KEYWORD_NAME')
ARGUMENT = 'ARGUMENT'
ARGUMENTS = 'ARGUMENTS'
ASSIGN = 'ASSIGN'
COMMENT = 'COMMENT'
COMMENT_HEADER = 'COMMENT_HEADER'
CONTINUATION = 'CONTINUATION'
DEFAULT_TAGS = 'DEFAULT_TAGS'
DOCUMENTATION = 'DOCUMENTATION'
END = 'END'
EOL = 'EOL'
EOS = 'EOS'
ERROR = 'ERROR'
FATAL_ERROR = 'FATAL_ERROR'
FOR = 'FOR'
FORCE_TAGS = 'FORCE_TAGS'
FOR_SEPARATOR = 'FOR_SEPARATOR'
HEADER_TOKENS = ('SETTING_HEADER', 'VARIABLE_HEADER', 'TESTCASE_HEADER', 'KEYWORD_HEADER', 'COMMENT_HEADER')
KEYWORD = 'KEYWORD'
KEYWORD_HEADER = 'KEYWORD_HEADER'
KEYWORD_NAME = 'KEYWORD_NAME'
LIBRARY = 'LIBRARY'
METADATA = 'METADATA'
NAME = 'NAME'
NON_DATA_TOKENS = ('SEPARATOR', 'COMMENT', 'CONTINUATION', 'EOL', 'EOS')
OLD_FOR_INDENT = 'OLD_FOR_INDENT'
RESOURCE = 'RESOURCE'
RETURN = 'RETURN'
SEPARATOR = 'SEPARATOR'
SETTING_HEADER = 'SETTING_HEADER'
SETTING_TOKENS = ('DOCUMENTATION', 'SUITE_SETUP', 'SUITE_TEARDOWN', 'METADATA', 'TEST_SETUP', 'TEST_TEARDOWN', 'TEST_TEMPLATE', 'TEST_TIMEOUT', 'FORCE_TAGS', 'DEFAULT_TAGS', 'LIBRARY', 'RESOURCE', 'VARIABLES', 'SETUP', 'TEARDOWN', 'TEMPLATE', 'TIMEOUT', 'TAGS', 'ARGUMENTS', 'RETURN')
SETUP = 'SETUP'
SUITE_SETUP = 'SUITE_SETUP'
SUITE_TEARDOWN = 'SUITE_TEARDOWN'
TAGS = 'TAGS'
TEARDOWN = 'TEARDOWN'
TEMPLATE = 'TEMPLATE'
TESTCASE_HEADER = 'TESTCASE_HEADER'
TESTCASE_NAME = 'TESTCASE_NAME'
TEST_SETUP = 'TEST_SETUP'
TEST_TEARDOWN = 'TEST_TEARDOWN'
TEST_TEMPLATE = 'TEST_TEMPLATE'
TEST_TIMEOUT = 'TEST_TIMEOUT'
TIMEOUT = 'TIMEOUT'
VARIABLE = 'VARIABLE'
VARIABLES = 'VARIABLES'
VARIABLE_HEADER = 'VARIABLE_HEADER'
WITH_NAME = 'WITH_NAME'
col_offset
end_col_offset
error
lineno
set_error(error, fatal=False)
tokenize_variables()

Tokenizes possible variables in the token value.

Yields the token itself if the token does not allow variables (see Token.ALLOW_VARIABLES) or its value does not contain variables. Otherwise yields variable tokens as well as the tokens before, after, and between variables; those surrounding tokens have the same type as the original token.

type
value
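
A sketch of from_token(), assuming it creates an EOS token positioned at the end of the given token:

    from robot.parsing.lexer.tokens import Token, EOS

    keyword = Token(Token.KEYWORD, 'Log', lineno=2, col_offset=4)
    eos = EOS.from_token(keyword)
    print(eos.type, eos.lineno, eos.col_offset)  # EOS at the end of 'Log'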