Skip to content

Commit d0f3b7b

Browse files
committed
Move where we concatenate expected tokens to handle ignoreErrorOrder
Also update the expected failures now that we no longer fail a test.
1 parent e706397 commit d0f3b7b

File tree

2 files changed

+3
-4
lines changed

2 files changed

+3
-4
lines changed

html5lib/tests/expected-failures/tokenizer.dat

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,5 @@
3636
#data
3737
<!DOCTYPE
3838

39-
#data
40-
I'm &no
41-
4239
#data
4340
<!DOCTYPE

html5lib/tests/test_tokenizer.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,7 @@ def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
108108
token.pop()
109109

110110
if not ignoreErrorOrder and not ignoreErrors:
111+
expectedTokens = concatenateCharacterTokens(expectedTokens)
111112
return expectedTokens == receivedTokens
112113
else:
113114
# Sort the tokens into two groups; non-parse errors and parse errors
@@ -120,6 +121,7 @@ def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
120121
else:
121122
if not ignoreErrors:
122123
tokens[tokenType][1].append(token)
124+
tokens[tokenType][0] = concatenateCharacterTokens(tokens[tokenType][0])
123125
return tokens["expected"] == tokens["received"]
124126

125127

@@ -144,7 +146,7 @@ def runTokenizerTest(test):
144146
warnings.resetwarnings()
145147
warnings.simplefilter("error")
146148

147-
expected = concatenateCharacterTokens(test['output'])
149+
expected = test['output']
148150
if 'lastStartTag' not in test:
149151
test['lastStartTag'] = None
150152
parser = TokenizerTestParser(test['initialState'],

0 commit comments

Comments
 (0)