
[3.9] Backport GH-20370 and GH-20436: Soft keywords #20458


Merged (1 commit) on May 27, 2020
Lib/test/test_peg_generator/test_c_parser.py (42 additions, 0 deletions)
@@ -402,3 +402,45 @@ def test_error_in_rules(self) -> None:
        parse.parse_string("a", mode=0)
    """
    self.run_test(grammar_source, test_source)

def test_no_soft_keywords(self) -> None:
    grammar_source = """
    start: expr+ NEWLINE? ENDMARKER
    expr: 'foo'
    """
    grammar = parse_string(grammar_source, GrammarParser)
    parser_source = generate_c_parser_source(grammar)
    assert "expect_soft_keyword" not in parser_source

def test_soft_keywords(self) -> None:
    grammar_source = """
    start: expr+ NEWLINE? ENDMARKER
    expr: "foo"
    """
    grammar = parse_string(grammar_source, GrammarParser)
    parser_source = generate_c_parser_source(grammar)
    assert "expect_soft_keyword" in parser_source

def test_soft_keywords_parse(self) -> None:
    grammar_source = """
    start: "if" expr '+' expr NEWLINE
    expr: NAME
    """
    test_source = """
    valid_cases = ["if if + if"]
    invalid_cases = ["if if"]
    self.check_input_strings_for_grammar(valid_cases, invalid_cases)
    """
    self.run_test(grammar_source, test_source)

def test_soft_keywords_lookahead(self) -> None:
    grammar_source = """
    start: &"if" "if" expr '+' expr NEWLINE
    expr: NAME
    """
    test_source = """
    valid_cases = ["if if + if"]
    invalid_cases = ["if if"]
    self.check_input_strings_for_grammar(valid_cases, invalid_cases)
    """
    self.run_test(grammar_source, test_source)
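
The convention these tests exercise: a single-quoted grammar literal such as 'foo' is a hard keyword, while a double-quoted literal such as "foo" is a soft keyword that the generated C parser matches by comparing the text of a NAME token, so the same word stays usable as an ordinary identifier elsewhere. A minimal standalone sketch of the same check, assuming the pegen package under Tools/peg_generator is importable (the test module arranges that via test.test_tools) and that parse_string, GrammarParser and generate_c_parser_source are the pegen helpers the tests above call:

# Sketch only: mirrors test_no_soft_keywords / test_soft_keywords above.
# Assumes the pegen package from Tools/peg_generator is on the import path.
from pegen.grammar_parser import GeneratedParser as GrammarParser
from pegen.testutil import generate_c_parser_source, parse_string

hard = parse_string("start: expr+ NEWLINE? ENDMARKER\nexpr: 'foo'\n", GrammarParser)
soft = parse_string('start: expr+ NEWLINE? ENDMARKER\nexpr: "foo"\n', GrammarParser)

# 'foo' is matched via _PyPegen_expect_token with a dedicated keyword type,
# so the soft-keyword helper never shows up in the generated source.
assert "expect_soft_keyword" not in generate_c_parser_source(hard)
# "foo" is matched via _PyPegen_expect_soft_keyword by string comparison.
assert "expect_soft_keyword" in generate_c_parser_source(soft)
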
Parser/pegen/pegen.c (33 additions, 1 deletion)
@@ -708,7 +708,6 @@ _PyPegen_is_memoized(Parser *p, int type, void *pres)
    return 0;
}


int
_PyPegen_lookahead_with_name(int positive, expr_ty (func)(Parser *), Parser *p)
{
@@ -718,6 +717,15 @@ _PyPegen_lookahead_with_name(int positive, expr_ty (func)(Parser *), Parser *p)
    return (res != NULL) == positive;
}

int
_PyPegen_lookahead_with_string(int positive, expr_ty (func)(Parser *, const char*), Parser *p, const char* arg)
{
    int mark = p->mark;
    void *res = func(p, arg);
    p->mark = mark;
    return (res != NULL) == positive;
}

int
_PyPegen_lookahead_with_int(int positive, Token *(func)(Parser *, int), Parser *p, int arg)
{
@@ -753,6 +761,30 @@ _PyPegen_expect_token(Parser *p, int type)
    return t;
}

expr_ty
_PyPegen_expect_soft_keyword(Parser *p, const char *keyword)
{
    if (p->mark == p->fill) {
        if (_PyPegen_fill_token(p) < 0) {
            p->error_indicator = 1;
            return NULL;
        }
    }
    Token *t = p->tokens[p->mark];
    if (t->type != NAME) {
        return NULL;
    }
    char* s = PyBytes_AsString(t->bytes);
    if (!s) {
        p->error_indicator = 1;
        return NULL;
    }
    if (strcmp(s, keyword) != 0) {
        return NULL;
    }
    return _PyPegen_name_token(p);
}

Token *
_PyPegen_get_last_nonnwhitespace_token(Parser *p)
{
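
A rough Python paraphrase of the two new runtime helpers, for readers skimming the C. The parser object and its peek_token/name_token methods are hypothetical stand-ins invented for this sketch, not pegen APIs; the point is the behaviour: a soft keyword matches only a NAME token whose text equals the keyword, it fails without setting an error otherwise, and a lookahead on it never consumes input (which is what test_soft_keywords_parse and test_soft_keywords_lookahead rely on).

# Illustrative paraphrase only; `parser`, `peek_token` and `name_token` are
# hypothetical stand-ins for the C Parser structure and helpers above.
def expect_soft_keyword(parser, keyword):
    token = parser.peek_token()      # look at the next token without committing to it
    if token.type != "NAME":         # numbers, operators, hard keywords: never a soft keyword
        return None
    if token.string != keyword:      # some other identifier: soft failure, no error set
        return None
    return parser.name_token()       # consume it as an ordinary NAME (an expr_ty Name node)

def lookahead_with_string(positive, func, parser, arg):
    mark = parser.mark               # mirrors p->mark
    res = func(parser, arg)
    parser.mark = mark               # a lookahead rewinds, so no input is consumed
    return (res is not None) == positive
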
Parser/pegen/pegen.h (2 additions, 0 deletions)
@@ -118,10 +118,12 @@ int _PyPegen_update_memo(Parser *p, int mark, int type, void *node);
int _PyPegen_is_memoized(Parser *p, int type, void *pres);

int _PyPegen_lookahead_with_name(int, expr_ty (func)(Parser *), Parser *);
int _PyPegen_lookahead_with_string(int , expr_ty (func)(Parser *, const char*), Parser *, const char*);
int _PyPegen_lookahead_with_int(int, Token *(func)(Parser *, int), Parser *, int);
int _PyPegen_lookahead(int, void *(func)(Parser *), Parser *);

Token *_PyPegen_expect_token(Parser *p, int type);
expr_ty _PyPegen_expect_soft_keyword(Parser *p, const char *keyword);
Token *_PyPegen_get_last_nonnwhitespace_token(Parser *);
int _PyPegen_fill_token(Parser *p);
expr_ty _PyPegen_name_token(Parser *p);
Tools/peg_generator/pegen/c_generator.py (28 additions, 5 deletions)
@@ -58,7 +58,8 @@ class NodeTypes(Enum):
    STRING_TOKEN = 2
    GENERIC_TOKEN = 3
    KEYWORD = 4
    CUT_OPERATOR = 5
    SOFT_KEYWORD = 5
    CUT_OPERATOR = 6


BASE_NODETYPES = {
@@ -117,6 +118,16 @@ def keyword_helper(self, keyword: str) -> FunctionCall:
        comment=f"token='{keyword}'",
    )

def soft_keyword_helper(self, value: str) -> FunctionCall:
    return FunctionCall(
        assigned_variable="_keyword",
        function="_PyPegen_expect_soft_keyword",
        arguments=["p", value],
        return_type="expr_ty",
        nodetype=NodeTypes.SOFT_KEYWORD,
        comment=f"soft_keyword='{value}'",
    )

def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall:
    name = node.value
    if name in self.non_exact_tokens:
Expand Down Expand Up @@ -154,7 +165,10 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall:
def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall:
val = ast.literal_eval(node.value)
if re.match(r"[a-zA-Z_]\w*\Z", val): # This is a keyword
return self.keyword_helper(val)
if node.value.endswith("'"):
return self.keyword_helper(val)
else:
return self.soft_keyword_helper(node.value)
else:
assert val in self.exact_tokens, f"{node.value} is not a known literal"
type = self.exact_tokens[val]
@@ -204,6 +218,12 @@ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall:
            arguments=[positive, call.function, *call.arguments],
            return_type="int",
        )
    elif call.nodetype == NodeTypes.SOFT_KEYWORD:
        return FunctionCall(
            function=f"_PyPegen_lookahead_with_string",
            arguments=[positive, call.function, *call.arguments],
            return_type="int",
        )
    elif call.nodetype in {NodeTypes.GENERIC_TOKEN, NodeTypes.KEYWORD}:
        return FunctionCall(
            function=f"_PyPegen_lookahead_with_int",
@@ -656,8 +676,9 @@ def handle_alt_normal(self, node: Alt, is_gather: bool, rulename: Optional[str])
    self.print("{")
    # We have parsed successfully all the conditions for the option.
    with self.indent():
        node_str = str(node).replace('"', '\\"')
        self.print(
            f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node}"));'
            f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node_str}"));'
        )
        # Prepare to emit the rule action and do so
        if node.action and "EXTRA" in node.action:
@@ -710,8 +731,9 @@ def visit_Alt(
    self.print(f"{{ // {node}")
    with self.indent():
        self._check_for_errors()
        node_str = str(node).replace('"', '\\"')
        self.print(
            f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node}"));'
            f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node_str}"));'
        )
        # Prepare variable declarations for the alternative
        vars = self.collect_vars(node)
@@ -733,9 +755,10 @@ visit_Alt(
            self.handle_alt_normal(node, is_gather, rulename)

        self.print("p->mark = _mark;")
        node_str = str(node).replace('"', '\\"')
        self.print(
            f"D(fprintf(stderr, \"%*c%s {rulename}[%d-%d]: %s failed!\\n\", p->level, ' ',\n"
            f' p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "{node}"));'
            f' p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "{node_str}"));'
        )
        if "_cut_var" in vars:
            self.print("if (_cut_var) {")
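
The three node_str additions above share one motivation: str(node) for an alternative that contains a soft keyword includes double quotes (for example the new test grammar's alternative "if" expr '+' expr NEWLINE), and that text is embedded inside a C string literal in the generated D(fprintf(...)) debug lines, so the quotes must be escaped first. A small runnable sketch of that step; the alternative text and the literal rule name "start" are filled in here for illustration:

# Sketch: why the generator escapes double quotes before emitting debug output.
node = '"if" expr \'+\' expr NEWLINE'   # roughly what str(node) yields for the soft-keyword test grammar
node_str = node.replace('"', '\\"')     # the escaping step added in this PR

# Without the escaping, the inner double quotes would end the C string literal early.
print(f'D(fprintf(stderr, "%*c> start[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node_str}"));')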