# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import normalize_whitespace
from robot.variables import is_assign
from .tokens import Token
class Lexer:
    """Base class for lexers.

    Concrete lexers override :meth:`accepts_more` and :meth:`lex`;
    :meth:`handles` may be overridden to restrict which statements a
    lexer applies to.
    """

    def __init__(self, ctx):
        # Lexing context shared by related lexers (provides settings such
        # as active templates and setting-lexing helpers).
        self.ctx = ctx

    def handles(self, statement):
        """Return ``True`` if this lexer can process ``statement``.

        The base implementation accepts everything.
        """
        return True

    def accepts_more(self, statement):
        """Return ``True`` if this lexer wants more statements."""
        raise NotImplementedError

    def lex(self):
        """Assign token types to the accepted statement(s)."""
        raise NotImplementedError
class StatementLexer(Lexer):
    """Base class for lexers that handle exactly one statement.

    Subclasses typically set :attr:`token_type` and/or override
    :meth:`lex` to assign types to the tokens in :attr:`statement`.
    """

    # Default token type assigned by simple subclasses; ``None`` in the base.
    token_type = None

    def __init__(self, ctx):
        super().__init__(ctx)
        # The single statement (a sequence of tokens) this lexer processes.
        # Assigned externally before ``lex`` is called.
        self.statement = None

    def accepts_more(self, statement):
        """A statement lexer never consumes more than one statement."""
        return False

    def lex(self):
        raise NotImplementedError
class SingleType(StatementLexer):
    """Lexer that marks every token in the statement with :attr:`token_type`."""

    def lex(self):
        for token in self.statement:
            token.type = self.token_type
class TypeAndArguments(StatementLexer):
    """Lexer that marks the first token with :attr:`token_type` and the
    rest as arguments."""

    def lex(self):
        self.statement[0].type = self.token_type
        for token in self.statement[1:]:
            token.type = Token.ARGUMENT
class SettingLexer(StatementLexer):
    """Lexer for settings; delegates the actual work to the context."""

    def lex(self):
        # The context knows the valid settings for the current scope
        # (file, test, keyword) and assigns token types accordingly.
        self.ctx.lex_setting(self.statement)
class TestOrKeywordSettingLexer(SettingLexer):
    """Setting lexer that only handles bracketed settings like ``[Setup]``."""

    def handles(self, statement):
        marker = statement[0].value
        # A non-empty marker enclosed in square brackets, e.g. '[Timeout]'.
        return marker and marker[0] == '[' and marker[-1] == ']'
class VariableLexer(TypeAndArguments):
    """Lexer for variable definitions: name token plus argument tokens."""

    token_type = Token.VARIABLE
class KeywordCallLexer(StatementLexer):
    """Lexer for keyword calls inside tests and keywords.

    When a template is active, every token is an argument to the template;
    otherwise leading assignment targets are marked ``ASSIGN``, the first
    non-assignment token is the ``KEYWORD``, and the rest are arguments.
    """

    def lex(self):
        if self.ctx.template_set:
            self._lex_as_template()
        else:
            self._lex_as_keyword_call()

    def _lex_as_template(self):
        # With a template active, the whole statement is arguments to it.
        for token in self.statement:
            token.type = Token.ARGUMENT

    def _lex_as_keyword_call(self):
        keyword_seen = False
        for token in self.statement:
            if keyword_seen:
                token.type = Token.ARGUMENT
            elif is_assign(token.value, allow_assign_mark=True):
                # Leading '${var} =' style assignment targets.
                token.type = Token.ASSIGN
            else:
                token.type = Token.KEYWORD
                keyword_seen = True
class EndLexer(TypeAndArguments):
    """Lexer for the ``END`` marker closing a block."""

    token_type = Token.END

    def handles(self, statement):
        return statement[0].value == 'END'
class ReturnLexer(TypeAndArguments):
    """Lexer for the ``RETURN`` statement."""

    token_type = Token.RETURN_STATEMENT

    def handles(self, statement):
        return statement[0].value == 'RETURN'
class ContinueLexer(TypeAndArguments):
    """Lexer for the ``CONTINUE`` statement."""

    token_type = Token.CONTINUE

    def handles(self, statement):
        return statement[0].value == 'CONTINUE'
class BreakLexer(TypeAndArguments):
    """Lexer for the ``BREAK`` statement."""

    token_type = Token.BREAK

    def handles(self, statement):
        return statement[0].value == 'BREAK'