# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from abc import ABC, abstractmethod
from typing import List

from robot.errors import DataError
from robot.utils import normalize_whitespace
from robot.variables import is_assign

from .context import FileContext, LexingContext, KeywordContext, TestCaseContext
from .tokens import StatementTokens, Token
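
# The lexers below operate on a single statement (a list of tokens) and assign
# a token type to each token in it. The `handles` method tells whether a lexer
# recognises a given statement, and `lex` does the actual token typing.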


class Lexer(ABC):
    """Base class for lexers."""

    def __init__(self, ctx: LexingContext):
        self.ctx = ctx

    def handles(self, statement: StatementTokens) -> bool:
        return True

    @abstractmethod
    def accepts_more(self, statement: StatementTokens) -> bool:
        raise NotImplementedError

    @abstractmethod
    def lex(self):
        raise NotImplementedError


class StatementLexer(Lexer, ABC):
    token_type: str

    def __init__(self, ctx: LexingContext):
        super().__init__(ctx)
        self.statement: StatementTokens = []

    def accepts_more(self, statement: StatementTokens) -> bool:
        return False

    def lex(self):
        raise NotImplementedError
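
    # Mark trailing tokens whose values start with any of the given names
    # (e.g. 'limit=') as OPTION tokens. Options can only occur at the end of
    # a statement, so iteration starts from the end and stops at the first
    # token that is not an option.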
    def _lex_options(self, *names: str, end_index: 'int|None' = None):
        for token in reversed(self.statement[:end_index]):
            if not token.value.startswith(names):
                break
            token.type = Token.OPTION


class SingleType(StatementLexer, ABC):

    def lex(self):
        for token in self.statement:
            token.type = self.token_type


class TypeAndArguments(StatementLexer, ABC):

    def lex(self):
        self.statement[0].type = self.token_type
        for token in self.statement[1:]:
            token.type = Token.ARGUMENT


class SettingLexer(StatementLexer):
    ctx: FileContext

    def lex(self):
        self.ctx.lex_setting(self.statement)


class TestCaseSettingLexer(StatementLexer):
    ctx: TestCaseContext

    def lex(self):
        self.ctx.lex_setting(self.statement)
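
    # A test case setting is recognised by its marker being wrapped in square
    # brackets, e.g. '[Setup]' or '[Tags]'.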
    def handles(self, statement: StatementTokens) -> bool:
        marker = statement[0].value
        return bool(marker and marker[0] == '[' and marker[-1] == ']')


class KeywordSettingLexer(StatementLexer):
    ctx: KeywordContext

    def lex(self):
        self.ctx.lex_setting(self.statement)

    def handles(self, statement: StatementTokens) -> bool:
        marker = statement[0].value
        return bool(marker and marker[0] == '[' and marker[-1] == ']')


class VariableLexer(TypeAndArguments):
    ctx: FileContext
    token_type = Token.VARIABLE


class KeywordCallLexer(StatementLexer):
    ctx: 'TestCaseContext|KeywordContext'

    def lex(self):
        if self.ctx.template_set:
            self._lex_as_template()
        else:
            self._lex_as_keyword_call()

    def _lex_as_template(self):
        for token in self.statement:
            token.type = Token.ARGUMENT
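
    # Leading tokens that look like variable assignments, e.g. '${var} =',
    # get the ASSIGN type. The first token that is not an assignment is the
    # keyword name and everything after it is an argument.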
    def _lex_as_keyword_call(self):
        keyword_seen = False
        for token in self.statement:
            if keyword_seen:
                token.type = Token.ARGUMENT
            elif is_assign(token.value, allow_assign_mark=True, allow_items=True):
                token.type = Token.ASSIGN
            else:
                token.type = Token.KEYWORD
                keyword_seen = True


class EndLexer(TypeAndArguments):
    token_type = Token.END

    def handles(self, statement: StatementTokens) -> bool:
        return statement[0].value == 'END'


class ReturnLexer(TypeAndArguments):
    token_type = Token.RETURN_STATEMENT

    def handles(self, statement: StatementTokens) -> bool:
        return statement[0].value == 'RETURN'


class ContinueLexer(TypeAndArguments):
    token_type = Token.CONTINUE

    def handles(self, statement: StatementTokens) -> bool:
        return statement[0].value == 'CONTINUE'


class BreakLexer(TypeAndArguments):
    token_type = Token.BREAK

    def handles(self, statement: StatementTokens) -> bool:
        return statement[0].value == 'BREAK'


class SyntaxErrorLexer(TypeAndArguments):
    token_type = Token.ERROR
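
    # These markers are valid only inside their corresponding blocks. If a
    # statement starting with one of them ends up with this lexer, it is used
    # in a position where it is not allowed, so the marker becomes an error
    # token.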
    def handles(self, statement: StatementTokens) -> bool:
        return statement[0].value in {'ELSE', 'ELSE IF', 'EXCEPT', 'FINALLY',
                                      'BREAK', 'CONTINUE', 'RETURN', 'END'}

    def lex(self):
        token = self.statement[0]
        token.set_error(f'{token.value} is not allowed in this context.')
        for t in self.statement[1:]:
            t.type = Token.ARGUMENT