mirror of
https://gitlab.freedesktop.org/mesa/mesa.git
synced 2026-01-04 20:00:11 +01:00
Fix #if-skipping to *really* skip the skipped group.
Previously we were avoiding printing within a skipped group, but we were still evaluating directives such as #define and #undef and still emitting diagnostics for things such as macro calls with the wrong number of arguments. Add a test for this and fix it with a high-priority rule in the lexer that consumes the skipped content.
This commit is contained in:
parent
96d3994881
commit
a771a40e22
4 changed files with 50 additions and 31 deletions
60
glcpp-lex.l
60
glcpp-lex.l
|
|
@ -47,6 +47,45 @@ HEXADECIMAL_INTEGER 0[xX][0-9a-fA-F]+[uU]?
|
|||
|
||||
%%
|
||||
|
||||
{HASH}if/.*\n {
|
||||
yyextra->lexing_if = 1;
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_IF;
|
||||
}
|
||||
|
||||
{HASH}elif/.*\n {
|
||||
yyextra->lexing_if = 1;
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_ELIF;
|
||||
}
|
||||
|
||||
{HASH}else/.*\n {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_ELSE;
|
||||
}
|
||||
|
||||
{HASH}endif/.*\n {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_ENDIF;
|
||||
}
|
||||
|
||||
/* When skipping (due to an #if 0 or similar) consume anything
|
||||
* up to a newline. We do this with less priority than any
|
||||
* #if-related directive (#if, #elif, #else, #endif), but with
|
||||
* more priority than any other directive or token to avoid
|
||||
* any side-effects from skipped content.
|
||||
*
|
||||
* We use the lexing_if flag to avoid skipping any part of an
|
||||
* if conditional expression. */
|
||||
[^\n]+/\n {
|
||||
if (yyextra->lexing_if ||
|
||||
yyextra->skip_stack == NULL ||
|
||||
yyextra->skip_stack->type == SKIP_NO_SKIP)
|
||||
{
|
||||
REJECT;
|
||||
}
|
||||
}
|
||||
|
||||
{HASH}define{HSPACE}+/{IDENTIFIER}"(" {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_DEFINE_FUNC;
|
||||
|
|
@ -62,26 +101,6 @@ HEXADECIMAL_INTEGER 0[xX][0-9a-fA-F]+[uU]?
|
|||
return HASH_UNDEF;
|
||||
}
|
||||
|
||||
{HASH}if {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_IF;
|
||||
}
|
||||
|
||||
{HASH}elif {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_ELIF;
|
||||
}
|
||||
|
||||
{HASH}else {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_ELSE;
|
||||
}
|
||||
|
||||
{HASH}endif {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH_ENDIF;
|
||||
}
|
||||
|
||||
{HASH} {
|
||||
yyextra->space_tokens = 0;
|
||||
return HASH;
|
||||
|
|
@ -163,6 +182,7 @@ HEXADECIMAL_INTEGER 0[xX][0-9a-fA-F]+[uU]?
|
|||
}
|
||||
|
||||
\n {
|
||||
yyextra->lexing_if = 0;
|
||||
return NEWLINE;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -159,19 +159,11 @@ input:
|
|||
|
||||
line:
|
||||
control_line {
|
||||
if (parser->skip_stack == NULL ||
|
||||
parser->skip_stack->type == SKIP_NO_SKIP)
|
||||
{
|
||||
printf ("\n");
|
||||
}
|
||||
printf ("\n");
|
||||
}
|
||||
| text_line {
|
||||
if (parser->skip_stack == NULL ||
|
||||
parser->skip_stack->type == SKIP_NO_SKIP)
|
||||
{
|
||||
_glcpp_parser_print_expanded_token_list (parser, $1);
|
||||
printf ("\n");
|
||||
}
|
||||
_glcpp_parser_print_expanded_token_list (parser, $1);
|
||||
printf ("\n");
|
||||
talloc_free ($1);
|
||||
}
|
||||
| expanded_line
|
||||
|
|
@ -889,6 +881,7 @@ glcpp_parser_create (void)
|
|||
parser->defines = hash_table_ctor (32, hash_table_string_hash,
|
||||
hash_table_string_compare);
|
||||
parser->active = _string_list_create (parser);
|
||||
parser->lexing_if = 0;
|
||||
parser->space_tokens = 1;
|
||||
parser->newline_as_space = 0;
|
||||
parser->in_control_line = 0;
|
||||
|
|
|
|||
1
glcpp.h
1
glcpp.h
|
|
@ -127,6 +127,7 @@ struct glcpp_parser {
|
|||
yyscan_t scanner;
|
||||
struct hash_table *defines;
|
||||
string_list_t *active;
|
||||
int lexing_if;
|
||||
int space_tokens;
|
||||
int newline_as_space;
|
||||
int in_control_line;
|
||||
|
|
|
|||
5
tests/062-if-0-skips-garbage.c
Normal file
5
tests/062-if-0-skips-garbage.c
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
#define foo(a,b)
|
||||
#if 0
|
||||
foo(bar)
|
||||
foo(
|
||||
#endif
|
||||
Loading…
Add table
Reference in a new issue