WONKY
diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c
--- a/src/frontend/lex/lex_preprocessing_directive.c
+++ b/src/frontend/lex/lex_preprocessing_directive.c
	case PKW_DEFINE:
		return preprocessing_lex_define_directive(lexer_data,token_location);
	case PKW_UNDEF:
-		return get_error_token("PREPROCESSING UNDEF NOT DONE",token_location,lexer_data->previous_token_location,lexer_data->program);
+		return preprocessing_lex_undef_directive(lexer_data,token_location);
	case PKW_LINE:
		return preprocessing_lex_line_directive(lexer_data,token_location);
	case PKW_ERROR:
	return (struct token*)ret;
}
+ struct token* preprocessing_lex_undef_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ {
+ 	struct token_undef_directive *ret;
+ 	struct token *hold_token;
+ 	ret=wonky_malloc(sizeof(struct token_undef_directive));
+ 	ret->type=PKW_UNDEF;
+ 	ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ 	ret->id=NULL;
+
+ 	hold_token=preprocessing_extract_next_token(lexer_data);
+
+ 	if(hold_token==NULL || !token_is_identifier_in_preprocessing(hold_token))
+ 	{
+ 		push_lexing_error("Expected id in #undef directive",lexer_data);
+ 	}else
+ 	{
+ 		if(token_is_a_special_macro(hold_token))
+ 		{
+ 			push_lexing_error("Special macro id in #undef directive is not allowed",lexer_data);
+ 		}else
+ 		{
+ 			ret->id=hold_token;
+ 		}
+ 	}
+ 	return (struct token*)ret;
+ }
void preprocessing_push_tokens_into_queue_until_eol(struct Lexer_Data *lexer_data,struct Queue *queue)
{
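
For context, #undef removes an existing macro definition. The new lexer path above builds a token_undef_directive whose id field holds the identifier token that follows #undef, and leaves it NULL when an error is pushed. A minimal sketch of what it accepts and rejects (standard C semantics; "special macro" is assumed here to mean predefined names such as __FILE__ and __LINE__):

	#define BUFFER_SIZE 128
	#undef BUFFER_SIZE      /* accepted: an identifier follows #undef       */
	#undef __FILE__         /* rejected: special (predefined) macro name    */
	#undef                  /* rejected: no identifier after the directive  */
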
diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c
--- a/src/semantics/program/translation_unit.c
+++ b/src/semantics/program/translation_unit.c
}
void token_ptr_execute_undef_directive(struct Token_Pointer *ptr,struct token_undef_directive *undef_directive)
{
+ 	struct identifier *id;
	token_ptr_goto_next_token(ptr);
+ 	if(undef_directive->id->type==KW_ID)
+ 	{
+ 		id=((struct token_identifier*)undef_directive->id)->id;
+ 	}else if(token_is_keyword(undef_directive->id))
+ 	{
+ 		id=((struct token_keyword*)undef_directive->id)->id;
+ 	}else
+ 	{
+ 		wonky_assert(SHOULD_NOT_REACH_HERE);
+ 	}
+ 	if(id!=NULL)
+ 	{
+ 		id->number_of_translation_unit_where_id_was_last_defined_as_a_macro=0;
+ 	}
}
void token_ptr_execute_line_directive(struct Token_Pointer *ptr,struct token_line_directive *line_directive)
{
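
The executor above clears number_of_translation_unit_where_id_was_last_defined_as_a_macro on the identifier, which is presumably how later lookups (id_is_a_macro) decide that the name is no longer defined in the current translation unit. The observable behaviour is plain C #undef semantics: the name stops being a macro and may later be redefined without a diagnostic. A small example:

	#define SIZE 10
	int a[SIZE];            /* expands to: int a[10];                       */
	#undef SIZE
	int SIZE = 3;           /* fine: SIZE is an ordinary identifier again   */
	#define SIZE 20         /* redefinition after #undef is not an error    */
	int b[SIZE];            /* expands to: int b[20];                       */
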
diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c
--- a/src/syntax/token/token.c
+++ b/src/syntax/token/token.c
	if(token->type==KW_ID)
		return id_is_a_macro(((struct token_identifier*)token)->id,macro_expansion_number,translation_unit_number);
+ 	else if(token_is_keyword(token))
+ 		return id_is_a_macro(((struct token_keyword*)token)->id,macro_expansion_number,translation_unit_number);
	else
		return token_is_a_special_macro(token) || token_is_keyword(token);
}
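
This token.c change matters because keywords are not special to the preprocessor: during translation phase 4 every keyword is just an identifier, so a keyword can name a macro and can appear after #define or #undef. A contrived but valid example (the choice of while is arbitrary):

	#define while if          /* legal: 'while' is only an identifier to the preprocessor */
	void f(void)
	{
		while(0){}        /* expands to: if(0){}                                      */
	}
	#undef while              /* here the name after #undef arrives as a keyword token    */
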
diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h
--- a/src/syntax/token/token.h
+++ b/src/syntax/token/token.h
{
	enum LEXER_TYPE type;
	struct Source_Location_Delta *delta;
- 	struct identifier *id;
+ 	struct token *id; /* keywords are also identifiers in preprocessing */
};
struct token_line_directive
{