
Commit 10bbb04

pekkaklarck authored and yanne committed
minor lexer/splitter cleanup
1 parent ef3e50c commit 10bbb04

File tree

1 file changed: 6 additions, 6 deletions

src/robot/parsing/lexer/splitter.py

Lines changed: 6 additions & 6 deletions
@@ -43,13 +43,14 @@ def _split_line(self, line, lineno, data_only=False):
             splitter = self._split_from_pipes
         columnno = 1
         data, sepa = Token.DATA, Token.SEPARATOR
-        trailing_whitespace = self._trailing_whitespace.search(line)
         for value, is_data in splitter(line.rstrip()):
             if is_data or not data_only:
                 yield Token(data if is_data else sepa, value, lineno, columnno)
             columnno += len(value)
-        if trailing_whitespace and not data_only:
-            yield Token(sepa, trailing_whitespace.group(), lineno, columnno)
+        if not data_only:
+            trailing_whitespace = re.search(r'\s+$', line, flags=re.UNICODE)
+            if trailing_whitespace:
+                yield Token(sepa, trailing_whitespace.group(), lineno, columnno)
 
     def _split_from_spaces(self, line):
         for index, value in enumerate(self._space_splitter.split(line)):
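
Note on the first hunk: the trailing-whitespace lookup now happens only when separator tokens are actually wanted (not data_only), so data-only splitting skips the regex entirely, and an inline re.search(r'\s+$', ...) replaces the previously precompiled pattern attribute. A minimal standalone sketch of the resulting emit order is below; the split_line function and the plain string token types are simplified stand-ins, not the real Splitter API.

import re

# Simplified stand-in for Splitter._split_line: yields (type, value, columnno)
# tuples instead of real Token objects and assumes cells are separated by two
# or more spaces.
def split_line(line, data_only=False):
    columnno = 1
    for index, value in enumerate(re.split(r'( {2,})', line.rstrip())):
        is_data = index % 2 == 0          # re.split keeps separators at odd indexes
        if value and (is_data or not data_only):
            yield ('DATA' if is_data else 'SEPARATOR', value, columnno)
        columnno += len(value)
    if not data_only:                     # regex runs only when separators are wanted
        trailing_whitespace = re.search(r'\s+$', line)
        if trailing_whitespace:
            yield ('SEPARATOR', trailing_whitespace.group(), columnno)

print(list(split_line('Log    Hello   ')))
# [('DATA', 'Log', 1), ('SEPARATOR', '    ', 4), ('DATA', 'Hello', 8), ('SEPARATOR', '   ', 13)]
print(list(split_line('Log    Hello   ', data_only=True)))
# [('DATA', 'Log', 1), ('DATA', 'Hello', 8)]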
@@ -101,15 +102,14 @@ def _remove_trailing_empty(self, tokens):
             if not token.value:
                 tokens.remove(token)
             elif token.type == token.DATA:
-                return
+                break
 
     def _remove_leading_empty(self, tokens):
-        # TODO: dropwhile - also above
         for token in list(tokens):
            if not token.value:
                tokens.remove(token)
            elif token.type in (token.DATA, token.CONTINUATION):
-                return
+                break
 
     def _ensure_data_after_continuation(self, tokens):
         if not any(t.type == t.DATA for t in tokens):
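
Note on the second hunk: in both helpers the loop is the last statement of the method, so replacing return with break does not change behaviour; it simply stops the scan. The removed TODO referred to itertools.dropwhile; a rough sketch of that idea follows, using plain (type, value) tuples as illustrative stand-ins for Token objects. Note that dropwhile only strips a contiguous leading run of empty tokens, whereas the loop above also keeps scanning past non-empty separator tokens, so the two are not strictly equivalent.

from itertools import dropwhile

# Illustrative only: tokens are (type, value) tuples, not real Token objects.
def remove_leading_empty(tokens):
    # Drop leading tokens with empty values; everything from the first
    # non-empty token onwards is kept. Slice assignment mutates in place,
    # like the real helper does.
    tokens[:] = dropwhile(lambda t: not t[1], tokens)

tokens = [('SEPARATOR', ''), ('DATA', ''), ('DATA', 'Log'), ('SEPARATOR', '  ')]
remove_leading_empty(tokens)
print(tokens)   # [('DATA', 'Log'), ('SEPARATOR', '  ')]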

0 commit comments
