Skip to content
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Pass pointers so that the struct does not get copied
  • Loading branch information
lysnikolaou committed Oct 6, 2022
commit c2320ba5672eaaad1463c8138ed94a07380a7aec
26 changes: 13 additions & 13 deletions Parser/pegen.c
Original file line number Diff line number Diff line change
Expand Up @@ -123,9 +123,9 @@ growable_comment_array_deallocate(growable_comment_array *arr) {
}

static int
_get_keyword_or_name_type(Parser *p, struct token new_token)
_get_keyword_or_name_type(Parser *p, struct token *new_token)
{
int name_len = new_token.end_col_offset - new_token.col_offset;
int name_len = new_token->end_col_offset - new_token->col_offset;
assert(name_len > 0);

if (name_len >= p->n_keyword_lists ||
Expand All @@ -134,19 +134,19 @@ _get_keyword_or_name_type(Parser *p, struct token new_token)
return NAME;
}
for (KeywordToken *k = p->keywords[name_len]; k != NULL && k->type != -1; k++) {
if (strncmp(k->str, new_token.start, name_len) == 0) {
if (strncmp(k->str, new_token->start, name_len) == 0) {
return k->type;
}
}
return NAME;
}

static int
initialize_token(Parser *p, Token *parser_token, struct token new_token, int token_type) {
initialize_token(Parser *p, Token *parser_token, struct token *new_token, int token_type) {
assert(parser_token != NULL);

parser_token->type = (token_type == NAME) ? _get_keyword_or_name_type(p, new_token) : token_type;
parser_token->bytes = PyBytes_FromStringAndSize(new_token.start, new_token.end - new_token.start);
parser_token->bytes = PyBytes_FromStringAndSize(new_token->start, new_token->end - new_token->start);
if (parser_token->bytes == NULL) {
return -1;
}
Expand All @@ -155,13 +155,13 @@ initialize_token(Parser *p, Token *parser_token, struct token new_token, int tok
return -1;
}

parser_token->level = new_token.level;
parser_token->lineno = new_token.lineno;
parser_token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + new_token.col_offset
: new_token.col_offset;
parser_token->end_lineno = new_token.end_lineno;
parser_token->end_col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + new_token.end_col_offset
: new_token.end_col_offset;
parser_token->level = new_token->level;
parser_token->lineno = new_token->lineno;
parser_token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + new_token->col_offset
: new_token->col_offset;
parser_token->end_lineno = new_token->end_lineno;
parser_token->end_col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + new_token->end_col_offset
: new_token->end_col_offset;

p->fill += 1;

Expand Down Expand Up @@ -238,7 +238,7 @@ _PyPegen_fill_token(Parser *p)
}

Token *t = p->tokens[p->fill];
return initialize_token(p, t, new_token, type);
return initialize_token(p, t, &new_token, type);
}

#if defined(Py_DEBUG)
Expand Down