 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from itertools import chain
+
 from .context import TestCaseFileContext, ResourceFileContext
 from .lexers import FileLexer
 from .splitter import Splitter
@@ -45,7 +47,13 @@ def get_tokens(self):
                       Token.OLD_FOR_INDENT}
         else:
             ignore = {Token.IGNORE}
-        for statement in self._handle_old_for(self.statements):
+        statements = self._handle_old_for(self.statements)
+        if not self._data_only:
+            statements = chain.from_iterable(
+                self._split_trailing_comment_and_empty_lines(s)
+                for s in statements
+            )
+        for statement in statements:
             name_token = last_token = None
             for token in statement:
                 if token.type in ignore:
@@ -82,6 +90,33 @@ def _get_first_data_token(self, statement):
             return token
         return None
 
+    def _split_trailing_comment_and_empty_lines(self, statement):
+        lines = list(self._split_to_lines(statement))
+        split_statements = []
+        for line in reversed(lines):
+            is_split = False
+            for token in line:
+                if token.type not in (token.IGNORE, token.SEPARATOR):
+                    is_split = token.type in (token.EOL, token.COMMENT)
+                    break
+            if not is_split:
+                break
+            split_statements.append(line)
+            lines.pop()
+        yield list(chain.from_iterable(lines))
+        for split in reversed(split_statements):
+            yield split
+
+    def _split_to_lines(self, statement):
+        current = []
+        for tok in statement:
+            current.append(tok)
+            if tok.type == tok.EOL:
+                yield current
+                current = []
+        if current:
+            yield current
+
 
 class TestCaseFileLexer(BaseLexer):
     context_class = TestCaseFileContext