@@ -20,13 +20,13 @@
 ).findall
 
 
-def split_tokens(s):
+def split_tokens(sch_str):
     """
     Returns list of syntax elements with quotes and spaces stripped.
     """
     parts = []
     parens = 0
-    for opar, cpar, unquoted, quoted, residue in TOKENS_FINDALL(s):
+    for opar, cpar, unquoted, quoted, residue in TOKENS_FINDALL(sch_str):
         if unquoted:
             parts.append(unquoted)
         elif quoted:
@@ -39,42 +39,43 @@ def split_tokens(s):
             parts.append(cpar)
         elif residue == '$':
             if not parens:
-                raise ValueError("'$' outside parenthesis in %r" % (s))
+                raise ValueError("'$' outside parenthesis in %r" % (sch_str))
         else:
-            raise ValueError(residue, s)
+            raise ValueError(residue, sch_str)
     if parens:
-        raise ValueError("Unbalanced parenthesis in %r" % (s))
+        raise ValueError("Unbalanced parenthesis in %r" % (sch_str))
     return parts
 
-def extract_tokens(l,known_tokens):
-    """
-    Returns dictionary of known tokens with all values
-    """
-    assert l[0].strip()=="(" and l[-1].strip()==")",ValueError(l)
-    result = {}
-    result.update(known_tokens)
-    i = 0
-    l_len = len(l)
-    while i < l_len:
-        if l[i] in result:
-            token = l[i]
-            i += 1 # Consume token
-            if i < l_len:
-                if l[i] in result:
-                    # non-valued
-                    result[token] = (())
-                elif l[i]=="(":
-                    # multi-valued
-                    i += 1 # Consume left parentheses
-                    start = i
-                    while i < l_len and l[i]!=")":
-                        i += 1
-                    result[token] = tuple(filter(lambda v:v != '$',l[start:i]))
-                    i += 1 # Consume right parentheses
-                else:
-                    # single-valued
-                    result[token] = l[i],
-                    i += 1 # Consume single value
-        else:
-            i += 1 # Consume unrecognized item
-    return result
+def extract_tokens(tkl, known_tokens):
+    """
+    Returns dictionary of known tokens with all values
+    """
+    assert tkl[0].strip() == "(" and tkl[-1].strip() == ")", ValueError(tkl)
+    result = dict(known_tokens)
+    i = 0
+    l_len = len(tkl)
+    while i < l_len:
+        if tkl[i] in result:
+            token = tkl[i]
+            i += 1 # Consume token
+            if i < l_len:
+                if tkl[i] in result:
+                    # non-valued
+                    result[token] = (())
+                elif tkl[i] == "(":
+                    # multi-valued
+                    i += 1 # Consume left parentheses
+                    start = i
+                    while i < l_len and tkl[i] != ")":
+                        i += 1
+                    result[token] = tuple([
+                        v for v in tkl[start:i] if v != '$'
+                    ])
+                    i += 1 # Consume right parentheses
+                else:
+                    # single-valued
+                    result[token] = tkl[i],
+                    i += 1 # Consume single value
+        else:
+            i += 1 # Consume unrecognized item
+    return result
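
Worked example (not part of the commit): a minimal sketch of what the reworked extract_tokens returns for a hand-built token list. The token list and known_tokens mapping below are made up for illustration; real input would come from split_tokens() and the TOKENS_FINDALL regex defined above this hunk, and the snippet assumes the new extract_tokens definition is in scope.

# Hypothetical tokens, roughly what split_tokens() might yield for a schema
# element such as "( ... NAME ( 'cn' 'commonName' ) DESC 'Common Name' ... )".
tokens = ['(', 'NAME', '(', 'cn', 'commonName', ')', 'DESC', 'Common Name', ')']

# Keys the caller cares about, mapped to their default values.
known = {'NAME': (), 'DESC': (), 'OBSOLETE': ()}

print(extract_tokens(tokens, known))
# -> {'NAME': ('cn', 'commonName'), 'DESC': ('Common Name',), 'OBSOLETE': ()}
# (key order may vary on older Python versions)

# '$' separators inside a parenthesised value list are dropped by the new
# list comprehension, exactly as the old filter()/lambda version did:
print(extract_tokens(['(', 'MUST', '(', 'cn', '$', 'sn', ')', ')'], {'MUST': ()}))
# -> {'MUST': ('cn', 'sn')}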