F diff --git a/src/environment/error/gcc_error.c b/src/environment/error/gcc_error.c
--- a/src/environment/error/gcc_error.c
+++ b/src/environment/error/gcc_error.c
struct Wonky_Message *ret;
va_start(args,msg_fmt);
- ret=get_wonky_message(type,source,line,column,filename,filename_size,msg_fmt,args);
+ ret=get_wonky_message_vargs(type,source,line,column,filename,filename_size,msg_fmt,args);
va_end(args);
return ret;
wonky_string_vprintf(ret->message,msg_fmt,args);
- ret->line=line;
- ret->column=column;
+ ret->line=line+1;
+ ret->column=column+1;
ret->filename=filename;
ret->filename_size=filename_size;
char *filename=NULL;
size_t filename_size=0;
+ struct wonky_stream s;
+ struct Wonky_Message *hold;
+
+
+
if(translation_data->token_pointer->context)
{
line=translation_data->token_pointer->context->line;
filename_size=translation_data->token_pointer->context->filename_size;
}
- Queue_Push(translation_data->program->errors,get_wonky_message_vargs(WONKY_MESSAGE_TYPE_ERROR,WONKY_MESSAGE_SOURCE_TRANSLATION,line,column,filename,filename_size,fmt,args));
+ hold=get_wonky_message_vargs(WONKY_MESSAGE_TYPE_ERROR,
+ WONKY_MESSAGE_SOURCE_TRANSLATION,
+ line,column,filename,filename_size,fmt,args);
+ s=wonky_string_stream(hold->message);
+ wonky_fseek(&s,0,SEEK_END);
+ wonky_fprintf(&s,"\n%WIC",translation_data->token_pointer);
+ Queue_Push(translation_data->program->errors,hold);
+ wonky_string_stream_delete(&s);
}
void push_translation_note(const char *fmt,struct Translation_Data *translation_data,...)
char *filename=NULL;
size_t filename_size=0;
+ struct wonky_stream s;
+ struct Wonky_Message *hold;
+
+
+
if(translation_data->token_pointer->context)
{
line=translation_data->token_pointer->context->line;
filename_size=translation_data->token_pointer->context->filename_size;
}
- Queue_Push(translation_data->program->errors,get_wonky_message_vargs(WONKY_MESSAGE_TYPE_NOTE,WONKY_MESSAGE_SOURCE_TRANSLATION,line,column,filename,filename_size,fmt,args));
+ hold=get_wonky_message_vargs(WONKY_MESSAGE_TYPE_NOTE,
+ WONKY_MESSAGE_SOURCE_TRANSLATION,
+ line,column,filename,filename_size,fmt,args);
+ s=wonky_string_stream(hold->message);
+ /* seek to the end so the inclusion chain is appended after the note text,
+ matching push_translation_error; without this the %WIC output would
+ overwrite the beginning of the formatted message */
+ wonky_fseek(&s,0,SEEK_END);
+ wonky_fprintf(&s,"\n%WIC",translation_data->token_pointer);
+ Queue_Push(translation_data->program->errors,hold);
+ wonky_string_stream_delete(&s);
}
void print_message(struct wonky_stream *out,struct Wonky_Message *msg)
{
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c
--- a/src/frontend/lex/lex_preprocessing_directive.c
+++ b/src/frontend/lex/lex_preprocessing_directive.c
}
_Bool preprocessing_get_and_check_token(struct Lexer_Data *lexer_data,enum LEXER_TYPE token_type)
{
+ if(preprocessing_eol(lexer_data))
+ return 0;
if(lexer_data->buffer_token==NULL)
lexer_data->buffer_token=preprocessing_extract_next_token(lexer_data);
}
_Bool preprocessing_eol(struct Lexer_Data *lexer_data)
{
+ /* NOTE(review): when a token is buffered, end-of-line is judged at the
+ byte just past the previous token. This branch has no eof/bounds
+ check — confirm previous_token_location is always set and in range
+ whenever buffer_token is non-NULL, otherwise src[] may be read
+ out of bounds at end of input. */
if(lexer_data->buffer_token)
return lexer_data->src->src[lexer_data->previous_token_location->starting_byte_index
+lexer_data->previous_token_location->length]=='\n';
else
return lexer_data->src->src[lexer_data->where_in_src]=='\n' || lexer_eof(lexer_data);
}
struct token *preprocessing_make_token_finishing_on_node(struct Lexer_Data *lexer_data,struct Automata_Node *finishing_node,size_t start_position,_Bool create_directive,_Bool create_defined_statement)
{
struct token_error_directive *ret;
size_t hold_start_location;
size_t hold_error_size;
+ struct wonky_stream string_stream;
hold_start_location=lexer_data->where_in_src;
preprocessing_goto_end_of_line(lexer_data);
ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
hold_error_size=lexer_data->where_in_src-hold_start_location;
- ret->error_message=wonky_malloc(sizeof("#error ")+hold_error_size);
- ret->error_message[sizeof("#error ")+hold_error_size]='\0';
+ ret->error_message=get_wonky_message(WONKY_MESSAGE_TYPE_ERROR,WONKY_MESSAGE_SOURCE_TRANSLATION,where->line,where->column,where->src->src_name->name,where->src->src_name->name_size,"#error ");
+ string_stream=wonky_string_stream(ret->error_message->message);
+ /* append the directive text after the "#error " prefix already formatted
+ into the message; writing from position 0 would clobber that prefix */
+ wonky_fseek(&string_stream,0,SEEK_END);
+ wonky_write(&string_stream,lexer_data->src->src+hold_start_location,hold_error_size);
- gmemmove(ret->error_message,"#error ",sizeof("#error ")-1);
- gmemmove(ret->error_message+sizeof("#error ")-1,lexer_data->src->src+hold_start_location,hold_error_size);
+ wonky_stream_delete(&string_stream);
return (struct token*)ret;
}
struct token* preprocessing_lex_pragma_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c
--- a/src/frontend/lex/lexer.c
+++ b/src/frontend/lex/lexer.c
delete_lexer_data(lexer_data);
if(lexed_unit->tokens->size==0)
+ {
push_generic_error(program,"Empty translation unit");
+ return NULL;
+ }
return lexed_unit;
}
F diff --git a/src/misc/print.c b/src/misc/print.c
--- a/src/misc/print.c
+++ b/src/misc/print.c
case KW_WIDE_STRING:
wonky_fprintf(out,"STRING");
break;
+ case PKW_MACRO_ARGUMENT:
+ wonky_fprintf(out,"MACRO ARGUMENT -_(:/)_-");
+ break;
+ case PKW_HASHTAG_UNARY_OP:
+ wonky_fprintf(out,"#argument");
+ break;
+ case PKW_HASHTAG_HASHTAG_OP:
+ {
+ struct Queue_Node *it;
+ it=((struct token_hashtag_hastag_operator*)token)->operands->first;
+ /* print "a##b##c": first operand bare, the rest prefixed with ##.
+ the advance to it->prev must stay inside the NULL check — an empty
+ operand queue would otherwise dereference a NULL node */
+ if(it)
+ {
+ print_token(out,(struct token*)it->data);
+ for(it=it->prev;it;it=it->prev)
+ wonky_fprintf(out,"##%Wt",(struct token*)it->data);
+ }
+ }
+ break;
default:
wonky_fprintf(out,"NOTYPE");
}
- wonky_fprintf(out," ]");
+ wonky_fprintf(out,"]");
}
char print_tokens_of_program(struct wonky_stream *out,char **base_source_names)
src=get_source_file_from_string(*base_source_names,gstrnlen(*base_source_names,1000),program);
hold_unit=lex(src,program);
- if(program->errors->size>0)
+ if(program->errors->size>0 || hold_unit==NULL)
{
ret=1;
print_errors(out,program->errors);
F diff --git a/src/misc/wonky_stream.c b/src/misc/wonky_stream.c
--- a/src/misc/wonky_stream.c
+++ b/src/misc/wonky_stream.c
char hold[2]={0,0};
ssize_t ret=0;
ssize_t hret;
+ intmax_t d;
if(a==0)
return wonky_write(s,"0",1);
}
- while(a)
+ /* find the largest power of ten not exceeding a; dividing first avoids
+ overflowing d and keeps exact powers of ten correct (a==10 needs d==10,
+ otherwise a/d would be 10 and produce the non-digit ':') */
+ for(d=1;a/d>=10;d*=10);
+ for(;d>0;d/=10)
{
- hold[0]=a%10+'0';
+ hold[0]=a/d+'0';
hret=wonky_write(s,hold,1);
if(hret<0)
return -ret;
else
ret+=hret;
- a/=10;
+ a=a%d;
}
return ret;
}
char hold[2]={0,0};
ssize_t ret=0;
ssize_t hret;
+ uintmax_t d;
if(a==0)
return wonky_write(s,"0",1);
- while(a)
+ /* largest power of ten <= a; a/d>=10 avoids overflow of d*10 and handles
+ exact powers of ten (d*10<a left d one decade too small for a==10) */
+ for(d=1;a/d>=10;d*=10);
+ for(;d>0;d/=10)
{
- hold[0]=a%10+'0';
+ hold[0]=a/d+'0';
hret=wonky_write(s,hold,1);
if(hret<0)
return -ret;
else
ret+=hret;
- a/=10;
+ a=a%d;
}
return ret;
}
char hold[2]={0,0};
ssize_t ret=0;
ssize_t hret;
+ uintmax_t d;
if(a==0)
return wonky_write(s,"0",1);
- while(a)
+ /* largest power of sixteen <= a; a/d>=16 avoids overflowing d*16 and
+ fixes exact powers of sixteen (with d*16<a, a==16 kept d==1 and the
+ digit computation produced a value past 'F') */
+ for(d=1;a/d>=16;d*=16);
+ for(;d>0;d/=16)
{
- if(a%16<10)
- hold[0]=a%16+'0';
+ if(a/d<10)
+ hold[0]=a/d+'0';
else
- hold[0]=a%16-10+'A';
+ hold[0]=a/d-10+'A';
hret=wonky_write(s,hold,1);
if(hret<0)
return -ret;
else
ret+=hret;
- a/=16;
+ a=a%d;
}
return ret;
}
char hold[2]={0,0};
ssize_t ret=0;
ssize_t hret;
+ uintmax_t d;
if(a==0)
return wonky_write(s,"0",1);
- while(a)
+ /* largest power of eight <= a; a/d>=8 avoids overflowing d*8 and handles
+ exact powers of eight (d*8<a mis-sized d for a==8, yielding a non-digit) */
+ for(d=1;a/d>=8;d*=8);
+ for(;d>0;d/=8)
{
- hold[0]=a%8+'0';
+ hold[0]=a/d+'0';
hret=wonky_write(s,hold,1);
if(hret<0)
return -ret;
else
ret+=hret;
- a/=8;
+ a=a%d;
}
return ret;
}
case WONKY__CONVERSION_WONKY_CONSTANT:
print_constant(s,va_arg(args,struct Constant*));
break;
+ case WONKY__CONVERSION_WONKY_SOURCE_NAME:
+ {
+ struct Source_Name *sn=va_arg(args,struct Source_Name*);
+ wonky_write(s,sn->name,sn->name_size);
+ }
+ break;
+ case WONKY__CONVERSION_WONKY_INCLUSION_CHAIN:
+ {
+ struct Token_Pointer *tp=va_arg(args,struct Token_Pointer*);
+ struct Token_Pointer_Context *hold;
+ _Bool print_line=0;
+ wonky_assert(tp!=NULL);
+ for(struct Stack_Node *it=tp->call_stack->first;it;it=it->next)
+ {
+ /* assign before asserting — the previous order asserted an
+ uninitialised pointer on the first iteration */
+ hold=(struct Token_Pointer_Context*)it->data;
+ wonky_assert(hold!=NULL);
+ if(hold->is_file_inclusion)
+ {
+ if(print_line)
+ wonky_write(s,"\n",1);
+ wonky_write(s,"\tIncluded in ",sizeof("\tIncluded in ")-1);
+ wonky_write(s,hold->filename,hold->filename_size);
+ wonky_fprintf(s,":%zu",hold->line+1);
+ print_line=1;
+ }
+
+ }
+
+ }
+ break;
}
}
%WAE - ast enum
%Wo - object
%WC - constant
+ %WS - source name
+ %WIC - inclusion chain ( Token_Pointer )
*/
void wonky__parse_scan_format(const char *begining, struct wonky__scanformat *destination)
{
case 'I':
++destination->forward_crawl;
if(destination->wonky_form)
- destination->conversion=WONKY__CONVERSION_WONKY_ID;
+ {
+ if(begining[destination->forward_crawl]=='C')
+ {
+ destination->conversion=WONKY__CONVERSION_WONKY_INCLUSION_CHAIN;
+ ++destination->forward_crawl;
+ }else
+ {
+ destination->conversion=WONKY__CONVERSION_WONKY_ID;
+ }
+ }
break;
case 'o':
++destination->forward_crawl;
case 'A':
++destination->forward_crawl;
if(destination->wonky_form)
+ {
if(begining[destination->forward_crawl]=='E')
+ {
destination->conversion=WONKY__CONVERSION_WONKY_AST_ENUM;
- else
+ ++destination->forward_crawl;
+ }else
+ {
destination->conversion=WONKY__CONVERSION_WONKY_AST;
- else
+ }
+ }else
+ {
destination->conversion=WONKY__CONVERSION_DOUBLE_HEXADECIMAL;
+ }
break;
case 'c':
++destination->forward_crawl;
if(destination->wonky_form)
destination->conversion=WONKY__CONVERSION_WONKY_CONSTANT;
break;
+ case 'S':
+ ++destination->forward_crawl;
+ if(destination->wonky_form)
+ destination->conversion=WONKY__CONVERSION_WONKY_SOURCE_NAME;
+ break;
}
}
F diff --git a/src/misc/wonky_stream.hh b/src/misc/wonky_stream.hh
--- a/src/misc/wonky_stream.hh
+++ b/src/misc/wonky_stream.hh
WONKY__CONVERSION_WONKY_TYPE,
WONKY__CONVERSION_WONKY_OBJECT,
WONKY__CONVERSION_WONKY_CONSTANT,
+ WONKY__CONVERSION_WONKY_SOURCE_NAME,
+ WONKY__CONVERSION_WONKY_INCLUSION_CHAIN,
WONKY__CONVERSION_END
};
enum wonky__scanformat_modifier
F diff --git a/src/semantics/program/program.c b/src/semantics/program/program.c
--- a/src/semantics/program/program.c
+++ b/src/semantics/program/program.c
{
hold_src=get_source_file_from_string(filename,filename_size,program);
ret=lex(hold_src,program);
+ if(ret==NULL)
+ return NULL;
Map_Push(program->preprocessing_translation_units,filename,filename_size,ret);
}
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c
--- a/src/semantics/program/translation_unit.c
+++ b/src/semantics/program/translation_unit.c
}
}
+
return hold_token;
}
struct token* token_ptr_get_token_under_pointer_inner(struct Token_Pointer *token_pointer)
hold_token=(struct token*)token_pointer->context->current_token_node->data;
wonky_assert(hold_token!=NULL);
+ token_ptr_assume_location_of_token(token_pointer,hold_token);
if(!token_ptr_do_preprocessing_stuff(token_pointer,hold_token))
return;
struct Token_Pointer *ret;
ret=wonky_malloc(sizeof(struct Token_Pointer));
- ret->context=get_token_ptr_context(unit->tokens->first,unit->tokens->size);
+ ret->context=get_token_ptr_context(unit->tokens->first,unit->tokens->size,1);
ret->call_stack=wonky_malloc(sizeof(struct Stack));
ret->state=TOKEN_POINTER_STATE_NORMAL;
ret->is_in_conditional_directive=0;
token_ptr_goto_next_token(ptr);
- token_ptr_jump_to_first(ptr,include_directive->tokens);
+ token_ptr_jump_to_first(ptr,include_directive->tokens,0);
token_ptr_set_barrier(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
include_name=((struct token_string*)hold_token)->constant->value;
hold_unit=program_get_translation_unit(ptr->program,include_name,gstrnlen(include_name,100));
+ if(hold_unit==NULL)
+ return;
+
if(hold_unit->tokens->first)
- token_ptr_jump_to_first(ptr,hold_unit->tokens);
+ token_ptr_jump_to_first(ptr,hold_unit->tokens,1);
else
token_ptr_goto_next_token(ptr);
}else if(hold_token->type==KW_LESS)
hold_unit=program_get_translation_unit(ptr->program,include_name,gstrnlen(include_name,100));
+ if(hold_unit==NULL)
+ return;
+
if(hold_unit->tokens->first)
- token_ptr_jump_to_first(ptr,hold_unit->tokens);
+ token_ptr_jump_to_first(ptr,hold_unit->tokens,1);
else
token_ptr_goto_next_token(ptr);
}else if(hold_token->type==KW_LESS_EQ) /*implementation defined*/
token_ptr_goto_next_token(ptr);
- token_ptr_jump_to_first(ptr,if_directive->controlling_expression);
+ token_ptr_jump_to_first(ptr,if_directive->controlling_expression,0);
dummy_data=get_dummy_translation_data_for_parsing_const_expressions(ptr);
if(evaluate_const_expression_integer(control,dummy_data)!=0)
{
- token_ptr_jump_to_first(ptr,if_directive->if_true);
+ token_ptr_jump_to_first(ptr,if_directive->if_true,0);
}else
{
if(if_directive->if_false!=NULL)
- token_ptr_jump_to_first(ptr,if_directive->if_false);
+ token_ptr_jump_to_first(ptr,if_directive->if_false,0);
}
{
token_ptr_goto_next_token(ptr);
if(token_is_a_macro(ifdef_directive->id,ptr->program->current_translation_unit_number))
- token_ptr_jump_to_first(ptr,ifdef_directive->if_defined);
+ token_ptr_jump_to_first(ptr,ifdef_directive->if_defined,0);
else
- token_ptr_jump_to_first(ptr,ifdef_directive->if_undefined);
+ token_ptr_jump_to_first(ptr,ifdef_directive->if_undefined,0);
}
void token_ptr_execute_ifndef_directive(struct Token_Pointer *ptr,struct token_ifdefndef_directive *ifndef_directive)
{
token_ptr_goto_next_token(ptr);
if(token_is_a_macro(ifndef_directive->id,ptr->program->current_translation_unit_number))
- token_ptr_jump_to_first(ptr,ifndef_directive->if_defined);
+ token_ptr_jump_to_first(ptr,ifndef_directive->if_defined,0);
else
- token_ptr_jump_to_first(ptr,ifndef_directive->if_undefined);
+ token_ptr_jump_to_first(ptr,ifndef_directive->if_undefined,0);
}
void token_ptr_execute_normal_define_directive(struct Token_Pointer *ptr,struct token_normal_define_directive *define_directive)
{
}
void token_ptr_execute_error_directive(struct Token_Pointer *ptr,struct token_error_directive *error_directive)
{
- push_generic_error(ptr->program,error_directive->error_message);
+ push_message_struct(ptr->program,error_directive->error_message);
program_stop_parsing(ptr->program);
token_ptr_goto_next_token(ptr);
}
}
void token_ptr_execute_normal_macro(struct Token_Pointer *ptr,struct normal_define_directive *macro)
{
- token_ptr_jump_to_first(ptr,macro->replacement_tokens);
+ token_ptr_jump_to_first(ptr,macro->replacement_tokens,0);
ptr->context->executed_macro_id=macro->id;
}
void token_ptr_execute_functionlike_macro(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
token_ptr_load_functionlike_macro_arguments_with_tokens(ptr,macro);
- token_ptr_jump_to_first(ptr,macro->replacement_tokens);
+ token_ptr_jump_to_first(ptr,macro->replacement_tokens,0);
ptr->context->executed_macro_id=macro->id;
}
void token_ptr_execute_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *argument)
if(argument->number_of_substitution_tokens==0)
token_ptr_goto_next_token(ptr);
else
- token_ptr_jump_to(ptr,argument->first_in_argument_substitution_tokens,argument->number_of_substitution_tokens);
+ token_ptr_jump_to_functionlike_macro_argument(ptr,argument);
+ }
+
+ void token_ptr_store_functionlike_macro_state_into_context(struct Token_Pointer_Context *ctx,struct functionlike_define_directive *macro)
+ {
+ struct Queue *save;
+ struct functionlike_define_directive_argument *hold;
+ struct Queue_Node *it;
+
+ save=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(save);
+
+ for(it=macro->arguments->first;it;it=it->prev)
+ {
+ hold=wonky_malloc(sizeof(struct functionlike_define_directive_argument));
+ *hold=*(struct functionlike_define_directive_argument*)it->data;
+ Queue_Push(save,hold);
+ }
+
+ ctx->has_saved_functionlike_macro_state=1;
+ ctx->functionlike_macro_arguments_save_state=save;
+ ctx->saved_macro=macro;
+ macro->id->was_already_expanded_as_a_macro=0;
+ }
+ /* Restore the argument state saved by
+ token_ptr_store_functionlike_macro_state_into_context: copy each saved
+ argument struct back over the macro's live argument, free the saved
+ copy, and re-mark the macro as expanded. */
+ void token_ptr_load_functionlike_macro_state_from_context(struct Token_Pointer_Context *ctx)
+ {
+ struct Queue_Node *it1,*it2;
+ struct functionlike_define_directive *macro=ctx->saved_macro;
+
+ /* it1 walks the saved copies, it2 the macro's live arguments; the copy
+ must go saved -> live (the previous direction overwrote the saves
+ with current state and freed the macro's own argument structs) */
+ for(it1=ctx->functionlike_macro_arguments_save_state->first,it2=macro->arguments->first;it1&&it2;it1=it1->prev,it2=it2->prev)
+ {
+ *(struct functionlike_define_directive_argument*)it2->data=*(struct functionlike_define_directive_argument*)it1->data;
+ wonky_free(it1->data);
+ }
+ Queue_Destroy(ctx->functionlike_macro_arguments_save_state);
+
+ ctx->has_saved_functionlike_macro_state=0;
+ ctx->functionlike_macro_arguments_save_state=NULL;
+ macro->id->was_already_expanded_as_a_macro=1;
+ ctx->saved_macro=NULL;
}
+
void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op)
{
struct token_string *hold_string_token;
{
hold_token=token_ptr_get_token_under_pointer(ptr);
if(hold_token->type==KW_OPEN_NORMAL)
+ {
++open_bracket_count;
+ if(open_bracket_count)
+ ++number_of_tokens_in_argument;
+ }
else if(hold_token->type==KW_CLOSE_NORMAL)
+ {
--open_bracket_count;
+ if(open_bracket_count)
+ ++number_of_tokens_in_argument;
+ }
else if(hold_token->type==KW_COMMA && open_bracket_count==1) /*if we are at the top level ()*/
{
if(hold_argument_node==NULL)
{
wonky_free(ptr);
}
- struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens)
+ struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion)
{
struct Token_Pointer_Context *ret;
struct Source_Location *hold_location;
ret->filename=hold_location->src->src_name->name;
ret->filename_size=hold_location->src->src_name->name_size;
ret->executed_macro_id=NULL;
+ ret->has_saved_functionlike_macro_state=0;
+ ret->functionlike_macro_arguments_save_state=NULL;
+ ret->saved_macro=NULL;
+ ret->is_file_inclusion=is_file_inclusion;
Queue_Init(ret->ungeted_tokens);
if(ptr->context->executed_macro_id)
ptr->context->executed_macro_id->was_already_expanded_as_a_macro=0;
+ /*
+ this was_already_expanded_as_a_macro was set to 0
+ because it technically should not have been expanded when the tokens
+ in the macro argument were expanded. reseting to 1
+ */
+ if(ptr->context->has_saved_functionlike_macro_state)
+ token_ptr_load_functionlike_macro_state_from_context(ptr->context);
delete_token_ptr_context(ptr->context);
ptr->context=Stack_Pop(ptr->call_stack);
}
void token_ptr_assume_location_of_token(struct Token_Pointer *ptr,struct token *token)
{
+ wonky_assert(token!=NULL);
ptr->context->line+=token->delta->line_offset;
ptr->context->column=token->delta->column;
}
wonky_assert(ptr && ptr->context && ptr->context->barrier);
ptr->context->barrier=0;
}
- void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens)
+ void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens,_Bool is_file_inclusion)
{
struct Token_Pointer_Context *new_context;
push_generic_error(ptr->program,"Preprocessing bounds exceeded");
return;
}
- new_context=get_token_ptr_context(where_to,number_of_remaining_tokens);
+ new_context=get_token_ptr_context(where_to,number_of_remaining_tokens,is_file_inclusion);
+ Stack_Push(ptr->call_stack,ptr->context);
+ ptr->context=new_context;
+ }
+ }
+ void token_ptr_jump_to_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *arg)
+ {
+ struct Token_Pointer_Context *new_context;
+
+ if(arg->first_in_argument_substitution_tokens!=NULL && arg->number_of_substitution_tokens!=0)
+ {
+ if(ptr->call_stack->size>1000)
+ {
+ push_generic_error(ptr->program,"Preprocessing bounds exceeded");
+ return;
+ }
+ new_context=get_token_ptr_context(arg->first_in_argument_substitution_tokens,arg->number_of_substitution_tokens,0);
+ token_ptr_store_functionlike_macro_state_into_context(new_context,arg->belongs_to->define);
Stack_Push(ptr->call_stack,ptr->context);
ptr->context=new_context;
}
}
- void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue)
+ void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue,_Bool is_file_inclusion)
{
- token_ptr_jump_to(ptr,queue->first,queue->size);
+ token_ptr_jump_to(ptr,queue->first,queue->size,is_file_inclusion);
}
void delete_token_ptr_context(struct Token_Pointer_Context *context)
{
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h
--- a/src/semantics/program/translation_unit.h
+++ b/src/semantics/program/translation_unit.h
NULL otherwise
*/
struct identifier *executed_macro_id;
+
+ /*
+ MEGAHACK!
+ */
+ _Bool has_saved_functionlike_macro_state;
+ struct Queue *functionlike_macro_arguments_save_state;
+ struct functionlike_define_directive *saved_macro;
+
+ /*
+ bool to separate #include contexts from macro et al. ones
+ */
+ _Bool is_file_inclusion;
};
struct Token_Pointer
{
struct Token_Pointer* get_token_ptr(struct Preprocessing_Translation_Unit *unit,struct Program *program);
void delete_token_ptr(struct Token_Pointer *ptr);
- struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens);
+ struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion);
void token_ptr_pop_context(struct Token_Pointer *ptr);
_Bool token_ptr_has_remaining_tokens(struct Token_Pointer *ptr);
void token_ptr_execute_functionlike_macro(struct Token_Pointer *ptr,struct functionlike_define_directive *macro);
void token_ptr_execute_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *argument);
+ void token_ptr_store_functionlike_macro_state_into_context(struct Token_Pointer_Context *ctx,struct functionlike_define_directive *macro);
+ void token_ptr_load_functionlike_macro_state_from_context(struct Token_Pointer_Context *ctx);
+
void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op);
void token_ptr_set_barrier(struct Token_Pointer *ptr);
void token_ptr_clear_barrier(struct Token_Pointer *ptr);
- void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens);
- void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue);
+ void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens,_Bool is_file_inclusion);
+ void token_ptr_jump_to_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *arg);
+ void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue,_Bool is_file_inclusion);
void delete_token_ptr_context(struct Token_Pointer_Context *context);
#endif
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c
--- a/src/syntax/source_file.c
+++ b/src/syntax/source_file.c
file=fopen(filename,"r");
if(file==NULL)
{
- push_generic_error(program,"Could not open filename %s",filename);
+ push_generic_error(program,"Could not open file %s",filename);
ret->src_name=get_source_name("");
return ret;
}
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h
--- a/src/syntax/token/token.h
+++ b/src/syntax/token/token.h
{
enum LEXER_TYPE type;
struct Source_Location_Delta *delta;
- char *error_message;
+ struct Wonky_Message *error_message;
};
struct token_pragma_directive
{