Commit 44e1753

Wliu authored and committed
Merge branch 'master' into wl-better-tokenization
2 parents 84c4815 + f86791c commit 44e1753

2 files changed: 64 additions, 32 deletions

grammars/python.cson

Lines changed: 43 additions & 17 deletions
@@ -242,14 +242,10 @@
         'name': 'storage.modifier.async.python'
       '2':
         'name': 'storage.type.function.python'
-    'end': '(\\))\\s*(?:(\\:)|(.*$\\n?))'
+    'end': ':'
     'endCaptures':
-      '1':
-        'name': 'punctuation.definition.parameters.end.python'
-      '2':
-        'name': 'punctuation.section.function.begin.python'
-      '3':
-        'name': 'invalid.illegal.missing-section-begin.python'
+      '0':
+        'name': 'punctuation.definition.function.begin.python'
     'name': 'meta.function.python'
     'patterns': [
       {
@@ -263,34 +259,64 @@
         ]
       }
       {
-        'begin': '(\\()'
+        'begin': '\\('
         'beginCaptures':
-          '1':
+          '0':
             'name': 'punctuation.definition.parameters.begin.python'
+        'end': '\\)'
+        'endCaptures':
+          '0':
+            'name': 'punctuation.definition.parameters.end.python'
         'contentName': 'meta.function.parameters.python'
-        'end': '(?=\\)\\s*\\:)'
         'patterns': [
           {
             'include': '#line_comments'
           }
           {
-            'include': '#keyword_arguments'
+            # param = 3
+            # param: int = 3
+            'begin': '\\b([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?\\s*(=)\\s*'
+            'beginCaptures':
+              '1':
+                'name': 'variable.parameter.function.python'
+              '2':
+                'name': 'punctuation.separator.python'
+              '3':
+                'name': 'storage.type.python'
+              '4':
+                'name': 'keyword.operator.assignment.python'
+            'end': '(?!\\G)'
+            'patterns': [
+              {
+                'include': '$self'
+              }
+            ]
           }
           {
+            # param
+            # param: int
+            'match': '\\b([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?'
            'captures':
              '1':
                'name': 'variable.parameter.function.python'
              '2':
-                'name': 'punctuation.separator.parameters.python'
-            'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(,)|(?=[\\n\\)]))'
+                'name': 'punctuation.separator.python'
+              '3':
+                'name': 'storage.type.python'
+          }
+          {
+            'match': ','
+            'name': 'punctuation.separator.parameters.python'
          }
        ]
      }
      {
-        # No match, not at the end of the line, and no opening parentheses
-        'begin': '(?!\\G)(?!\\s*$)(?!.*\\()'
-        'end': '$'
-        'name': 'invalid.illegal.missing-parameters.python'
+        'match': '(->)\\s*([A-Za-z_][\\w_]*)(?=\\s*:)'
+        'captures':
+          '1':
+            'name': 'keyword.operator.function-annotation.python'
+          '2':
+            'name': 'storage.type.python'
      }
    ]
  }
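
For reference, the rewritten parameter rules recognize plain parameters, annotated parameters, defaults, annotated defaults, and return annotations, instead of flagging unconventional definitions as invalid. An illustrative Python definition of the shape exercised by the updated spec below (not part of the diff itself):

    # annotated parameter (a: None), annotated default (b: int = 3), return annotation (-> int)
    def f(a: None, b: int = 3) -> int:
        return b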

spec/python-spec.coffee

Lines changed: 21 additions & 15 deletions
@@ -634,19 +634,6 @@ describe "Python grammar", ->
     expect(tokens[2]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python']
     expect(tokens[4]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python']
 
-  it "tokenizes functions that are missing parameters", ->
-    {tokens} = grammar.tokenizeLine 'def test # whoops'
-
-    expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python']
-    expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python']
-    expect(tokens[3]).toEqual value: ' # whoops', scopes: ['source.python', 'meta.function.python', 'invalid.illegal.missing-parameters.python']
-
-    {tokens} = grammar.tokenizeLine 'def test:'
-
-    expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python']
-    expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python']
-    expect(tokens[3]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'invalid.illegal.missing-parameters.python']
-
   it "tokenizes comments inside function parameters", ->
     {tokens} = grammar.tokenizeLine('def test(arg, # comment')
 
@@ -677,7 +664,27 @@ describe "Python grammar", ->
     expect(tokens[2][5]).toEqual value: ' comment', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python']
     expect(tokens[3][1]).toEqual value: 'config', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python']
     expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python']
-    expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python']
+    expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.function.begin.python']
+
+  it "tokenizes a function definition with annotations", ->
+    {tokens} = grammar.tokenizeLine 'def f(a: None, b: int = 3) -> int:'
+
+    expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python']
+    expect(tokens[2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python']
+    expect(tokens[3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python']
+    expect(tokens[4]).toEqual value: 'a', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python']
+    expect(tokens[5]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.python']
+    expect(tokens[7]).toEqual value: 'None', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'storage.type.python']
+    expect(tokens[8]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python']
+    expect(tokens[10]).toEqual value: 'b', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python']
+    expect(tokens[11]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.python']
+    expect(tokens[13]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'storage.type.python']
+    expect(tokens[15]).toEqual value: '=', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'keyword.operator.assignment.python']
+    expect(tokens[17]).toEqual value: '3', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'constant.numeric.integer.decimal.python']
+    expect(tokens[18]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python']
+    expect(tokens[20]).toEqual value: '->', scopes: ['source.python', 'meta.function.python', 'keyword.operator.function-annotation.python']
+    expect(tokens[22]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'storage.type.python']
+    expect(tokens[23]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.function.begin.python']
 
   it "tokenizes complex function calls", ->
     {tokens} = grammar.tokenizeLine "torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0]"
@@ -758,7 +765,6 @@ describe "Python grammar", ->
     expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']
 
   it "tokenizes SQL inline highlighting on single line with a CTE", ->
-
     {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'')
 
     expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python']
