WONKY



LOG | FILES | OVERVIEW


F diff --git a/src/environment/error/gcc_error.c b/src/environment/error/gcc_error.c --- a/src/environment/error/gcc_error.c +++ b/src/environment/error/gcc_error.c
case 'D':
hold_return_substring=get_string_for_denoted_error(va_arg(args,struct Denoted*));
break;
+ case 't':
+ hold_return_substring=get_string_for_token_error(va_arg(args,struct token*));
+ break;
default:
continue; /*goes to top for loop*/
}
return ret;
}
+ /*Stub %t formatter: ignores the token and returns a heap copy of the
+ literal "token". TODO: render the token's actual spelling.*/
+ char* get_string_for_token_error(struct token *token)
+ {
+ char *ret;
+
+ ret=gstr_to_heap("token");
+
+ return ret;
+ }
char* get_string_for_id_error(struct identifier *id)
{
char *ret;
F diff --git a/src/environment/error/gcc_error.h b/src/environment/error/gcc_error.h --- a/src/environment/error/gcc_error.h +++ b/src/environment/error/gcc_error.h
%T print type - takes pointer to Type
%D denoted - takes pointer to Denoted
%I identifier - takes pointer to an id
+ %t token - takes a pointer to a token
*/
struct Translation_Message* get_translation_message(const char *message_format,struct Program *program,char *filename,size_t filename_size,size_t line,size_t column,va_list args);
struct Translation_Message* get_translation_message_inner(const char *message,struct Program *program,char *filename,size_t filename_size,size_t line,size_t column);
char* get_string_for_type_error(struct Type *type);
char* get_string_for_denoted_error(struct Denoted *denoted);
+ char* get_string_for_token_error(struct token *token);
char* get_string_for_id_error(struct identifier *id);
char* get_translation_message_location_prefix(char *filename,size_t line,size_t column);
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c --- a/src/frontend/lex/lex_preprocessing_directive.c +++ b/src/frontend/lex/lex_preprocessing_directive.c
{
/*the first '##' has been eaten by the caller*/
do{
- Queue_Push(ret_q,previous_token);
hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
+ Queue_Push(ret_q,hold_token);
}while(preprocessing_get_and_check_token(lexer_data,KW_HASHTAG_HASHTAG) && !preprocessing_eol(lexer_data));
if(!preprocessing_eol(lexer_data))
Queue_Push(ret_q,previous_token);
return get_error_token("Unrecognised lexical element",get_source_location(
lexer_data->which_column,
lexer_data->which_row,
- lexer_data->where_in_src,
- lexer_data->src->src_name
+ where_does_the_token_start_in_the_source_file,
+ lexer_data->where_in_src-where_does_the_token_start_in_the_source_file,
+ lexer_data->src
),
lexer_data->previous_token_location,
lexer_data->program);
token_location=get_source_location(
lexer_data->which_row,
lexer_data->which_column,
- lexer_data->where_in_src,
- lexer_data->src->src_name
+ start_position,
+ lexer_data->where_in_src-start_position,
+ lexer_data->src
);
if(create_directive)
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c --- a/src/frontend/lex/lexer.c +++ b/src/frontend/lex/lexer.c
}
+ struct token* lex_token_from_string(char *str,size_t string_length)
+ {
+ struct Automata_Node *head;
+ struct Automata_Node *follower;
+ struct token *ret;
+
+ wonky_assert(str && string_length);
+
+ head=&chonky[0];
+ follower=NULL;
+ /*NOTE(review): unfinished — 'follower' and 'ret' are never used and the
+ function falls off the end without a return statement; using the result
+ is undefined behaviour. Complete the automata walk before calling this.*/
+ }
void lexer_skip_white_space(struct Lexer_Data *lexer_data)
{
lexer_skip_white_space(lexer_data);
where_does_the_token_start_in_the_source_file=lexer_data->where_in_src;
- hold_node=lexer_feed_automata_until_error(lexer_data);
-
if(lexer_eof(lexer_data))
return NULL;
+ hold_node=lexer_feed_automata_until_error(lexer_data);
+
+
if(hold_node==NULL)
return get_error_token("Unrecognised lexical element",get_source_location(
lexer_data->which_column,
lexer_data->which_row,
- lexer_data->where_in_src,
- lexer_data->src->src_name
+ where_does_the_token_start_in_the_source_file,
+ lexer_data->where_in_src-where_does_the_token_start_in_the_source_file,
+ lexer_data->src
),
lexer_data->previous_token_location,
lexer_data->program);
token_location=get_source_location(
lexer_data->which_row,
lexer_data->which_column,
- lexer_data->where_in_src,
- lexer_data->src->src_name
+ start_position,
+ lexer_data->where_in_src-start_position,
+ lexer_data->src
);
switch(finishing_node->keyword)
F diff --git a/src/frontend/lex/lexer.h b/src/frontend/lex/lexer.h --- a/src/frontend/lex/lexer.h +++ b/src/frontend/lex/lexer.h
struct Preprocessing_Translation_Unit* lex_inner(struct Lexer_Data *lexer_data);
struct Lexer_Data* get_lexer_data(struct Source_File *src,struct Program *program);
+ struct token* lex_token_from_string(char *str,size_t string_length);
+
void lexer_skip_white_space(struct Lexer_Data *lexer_data);
_Bool lexer_eof(struct Lexer_Data *lexer_data);
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c --- a/src/semantics/program/translation_unit.c +++ b/src/semantics/program/translation_unit.c
{
return;
}
-
+ /*Intentional no-op: the tokens stay alive because they are shared with
+ other structures. NOTE(review): the unit struct and its queue nodes are
+ also never freed here — presumably a deliberate leak; confirm ownership.*/
+ void delete_preprocessing_translation_unit_but_not_the_tokens(struct Preprocessing_Translation_Unit *unit)
+ {
+ return;
+ }
void push_token_into_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit,struct token *token)
{
Queue_Push(unit->tokens,token);
while(1)
{
- if(token_pointer->context->number_of_remaining_tokens==0)
+ while(!token_ptr_has_remaining_tokens_in_current_context(token_pointer) &&
+ token_pointer->call_stack->size>0)
+ token_ptr_pop_context(token_pointer);
+
+ if(token_pointer->context==NULL || token_pointer->context->current_token_node==NULL)
return;
hold_token=(struct token*)token_pointer->context->current_token_node->data;
Stack_Init(ret->call_stack);
ret->program=program;
-
- ret->macro_expansion_number=1;
- ret->next_barrier_number=1;
return ret;
}
struct token *hold_token;
struct Preprocessing_Translation_Unit *hold_unit;
char *include_name;
- unsigned int barrier;
wonky_assert(include_directive->tokens->first);
token_ptr_jump_to_first(ptr,include_directive->tokens);
- barrier=token_ptr_set_barrier(ptr);
+ token_ptr_set_barrier(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
hold_token=token_ptr_get_token_under_pointer(ptr);
ptr->state=TOKEN_POINTER_STATE_NORMAL;
- wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
token_ptr_clear_barrier(ptr);
if(hold_token->type==KW_STRING)
token_ptr_goto_next_token(ptr);
}else if(hold_token->type==KW_LESS)
{
- barrier=token_ptr_set_barrier(ptr);
+ token_ptr_set_barrier(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
include_name=gstrncpy("",2);
hold_token=token_ptr_get_token_under_pointer(ptr);
hold_token=token_ptr_get_token_under_pointer(ptr);
}
ptr->state=TOKEN_POINTER_STATE_NORMAL;
- wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
token_ptr_clear_barrier(ptr);
if(ptr->context->current_token_node!=NULL)
struct AST *control;
struct Translation_Data *dummy_data;
struct Scope *dummy_scope;
- unsigned int barrier;
token_ptr_goto_next_token(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
ptr->is_in_conditional_directive=1;
- barrier=token_ptr_set_barrier(ptr);
+ token_ptr_set_barrier(ptr);
control=parse_expression(dummy_data,dummy_scope);
- wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
token_ptr_clear_barrier(ptr);
ptr->is_in_conditional_directive=0;
ptr->state=TOKEN_POINTER_STATE_NORMAL;
void token_ptr_execute_ifdef_directive(struct Token_Pointer *ptr,struct token_ifdefndef_directive *ifdef_directive)
{
token_ptr_goto_next_token(ptr);
- if(token_is_a_macro(ifdef_directive->id,ptr->macro_expansion_number,ptr->program->current_translation_unit_number))
+ if(token_is_a_macro(ifdef_directive->id,ptr->program->current_translation_unit_number))
token_ptr_jump_to_first(ptr,ifdef_directive->if_defined);
else
token_ptr_jump_to_first(ptr,ifdef_directive->if_undefined);
void token_ptr_execute_ifndef_directive(struct Token_Pointer *ptr,struct token_ifdefndef_directive *ifndef_directive)
{
token_ptr_goto_next_token(ptr);
- if(token_is_a_macro(ifndef_directive->id,ptr->macro_expansion_number,ptr->program->current_translation_unit_number))
+ if(token_is_a_macro(ifndef_directive->id,ptr->program->current_translation_unit_number))
token_ptr_jump_to_first(ptr,ifndef_directive->if_defined);
else
token_ptr_jump_to_first(ptr,ifndef_directive->if_undefined);
struct token_constant *line;
token_ptr_goto_next_token(ptr);
- if(token_is_a_macro(operator->id,ptr->macro_expansion_number,ptr->program->current_translation_unit_number))
+ if(token_is_a_macro(operator->id,ptr->program->current_translation_unit_number))
{
line=(struct token_constant*)get_constant_token(
KW_DECIMAL_CONSTANT,
}
void token_ptr_execute_macro(struct Token_Pointer *ptr,struct identifier *id)
{
- id->macro_expansion_number=ptr->macro_expansion_number;
- /*the hack deepens*/
- ++ptr->macro_expansion_number;
+ id->was_already_expanded_as_a_macro=1;
token_ptr_goto_next_token(ptr);
void token_ptr_execute_normal_macro(struct Token_Pointer *ptr,struct normal_define_directive *macro)
{
token_ptr_jump_to_first(ptr,macro->replacement_tokens);
+ ptr->context->executed_macro_id=macro->id;
}
void token_ptr_execute_functionlike_macro(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
token_ptr_load_functionlike_macro_arguments_with_tokens(ptr,macro);
token_ptr_jump_to_first(ptr,macro->replacement_tokens);
+ ptr->context->executed_macro_id=macro->id;
}
void token_ptr_execute_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *argument)
{
Queue_Push(ptr->context->ungeted_tokens,hold_string_token);
}
+ /*
+ Implements one '##' concatenation: splices the source bytes of 'left'
+ and 'right' into a temporary in-memory source file and re-lexes it,
+ returning the single resulting token. If either side is empty (a
+ placemarker), the other token is returned unchanged.
+ */
+ struct token* token_ptr_execute_concat_two_tokens(struct token *left,struct token *right,struct Program *program)
+ {
+ #warning this is a bit slow on consecutive concatenations x##y##z##p
+ struct Source_File *temp_source_file; /*does not get freed*/
+ struct Lexer_Data *temp_lexer_data;
+ struct Preprocessing_Translation_Unit *hold_unit;
+ struct token *ret;
+ size_t left_size,right_size;
+
+ left_size=left->delta->location->length;
+ right_size=right->delta->location->length;
+ /*placemarker handling: concatenation with an empty token yields the other token*/
+ if(left_size==0)
+ return right;
+ if(right_size==0)
+ return left;
+
+
+ temp_source_file=get_temp_source_file();
+ temp_source_file->src=wonky_malloc(left_size+right_size);
+ temp_source_file->src_size=left_size+right_size;
+
+ gmemmove(temp_source_file->src,
+ left->delta->location->src->src+left->delta->location->starting_byte_index,
+ left_size);
+ gmemmove(temp_source_file->src+left_size,
+ right->delta->location->src->src+right->delta->location->starting_byte_index,
+ right_size);
+
+ temp_lexer_data=get_lexer_data(temp_source_file,program);
+
+ /*
+ hack to stop the lexer from lexing a preprocessing directive
+ after concatenating '#' and 'include', for example
+ */
+ temp_lexer_data->is_in_the_begining_of_line=0;
+
+ hold_unit=lex_inner(temp_lexer_data);
+
+ /*the concatenation must produce exactly one preprocessing token,
+ otherwise the behaviour is undefined (C11 6.10.3.3)*/
+ wonky_assert( hold_unit &&
+ hold_unit->tokens->size==1 &&
+ hold_unit->tokens->first &&
+ hold_unit->tokens->first->data);
+
+ ret=hold_unit->tokens->first->data;
+
+
+
+ delete_lexer_data(temp_lexer_data);
+ delete_preprocessing_translation_unit_but_not_the_tokens(hold_unit);
+
+ return ret;
+
+ }
+ /*
+ * For both object-like and function-like macro invocations, before the replacement list is
+ * reexamined for more macro names to replace, each instance of a ## preprocessing token
+ * in the replacement list (not from an argument) is deleted and the preceding preprocessing
+ * token is concatenated with the following preprocessing token. Placemarker
+ * preprocessing tokens are handled specially: concatenation of two placemarkers results in
+ * a single placemarker preprocessing token, and concatenation of a placemarker with a
+ * non-placemarker preprocessing token results in the non-placemarker preprocessing token.
+ * If the result is not a valid preprocessing token, the behavior is undefined. The resulting
+ * token is available for further macro replacement. The order of evaluation of ## operators
+ * is unspecified.
+ */
void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_operator *op)
{
- unsigned int barrier;
+ struct Preprocessing_Translation_Unit *hold_unit;
+ struct token *hold_left,*hold_right;
+ struct Queue_Node *it;
+
- barrier=token_ptr_set_barrier(ptr);
+ token_ptr_set_barrier(ptr);
token_ptr_goto_next_token(ptr);
+ hold_left=(struct token*)op->operands->first->data;
+
+ if(hold_left->type==PKW_MACRO_ARGUMENT)
+ {
+ /*
+ * if the first token in the concatenation is a macro argument, chase its last token
+ * and use that for the concatenation; the earlier tokens pass through unmodified
+ */
+ struct token_functionlike_define_argument *arg=(struct token_functionlike_define_argument*)hold_left;
+ if(arg->argument->number_of_substitution_tokens)
+ {
+ size_t i;
+ for(i=1,it=arg->argument->first_in_argument_substitution_tokens;i<arg->argument->number_of_substitution_tokens;
+ ++i,it=it->prev)
+ {
+ Queue_Push(ptr->context->ungeted_tokens,it->data);
+ }
+ hold_left=(struct token*)it->data;
+ }
+ }
+ for(it=op->operands->first->prev;it;it=it->prev) /*start from the second argument, the first is in hold_left*/
+ {
+ hold_right=(struct token*)it->data;
+ wonky_assert(hold_left!=NULL);
+ if(hold_right->type==PKW_MACRO_ARGUMENT)
+ {
+ struct token_functionlike_define_argument *arg=(struct token_functionlike_define_argument*)hold_right;
+ if(arg->argument->number_of_substitution_tokens>1)
+ {
+ size_t i;
+ /*concat with the argument's first token, emit the middle ones, continue from its last*/
+ hold_right=(struct token*)arg->argument->first_in_argument_substitution_tokens->data;
+ hold_left=token_ptr_execute_concat_two_tokens(hold_left,hold_right,ptr->program);
+ Queue_Push(ptr->context->ungeted_tokens,hold_left);
+
+ for(i=1,it=arg->argument->first_in_argument_substitution_tokens->prev;
+ i<arg->argument->number_of_substitution_tokens;
+ ++i,it=it->prev)
+ {
+ Queue_Push(ptr->context->ungeted_tokens,it->data);
+ }
+ hold_left=(struct token*)it->data;
+ }else if(arg->argument->number_of_substitution_tokens==1)
+ {
+
+ hold_left=token_ptr_execute_concat_two_tokens(hold_left,arg->argument->first_in_argument_substitution_tokens->data,ptr->program);
+ }
+ }else
+ {
+ hold_left=token_ptr_execute_concat_two_tokens(hold_left,hold_right,ptr->program);
+ }
+ }
+ Queue_Push(ptr->context->ungeted_tokens,hold_left);
- wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
token_ptr_clear_barrier(ptr);
}
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
case LT_EOF:
if(token_pointer->call_stack->size>0)
{
- token_pointer->context=Stack_Pop(token_pointer->call_stack);
+ delete_token_ptr_context(token_pointer->context);
return 1;
}else
{
hold_id_token=(struct token_identifier*)token;
if( id_is_a_macro(
hold_id_token->id,
- token_pointer->macro_expansion_number,
token_pointer->program->current_translation_unit_number
)
&&
hold_kw_token=(struct token_keyword*)token;
if( id_is_a_macro(
hold_kw_token->id,
- token_pointer->macro_expansion_number,
token_pointer->program->current_translation_unit_number
)
&&
ret->line=hold_location->line;
ret->column=hold_location->column;
- ret->filename=hold_location->src_name->name;
- ret->filename_size=hold_location->src_name->name_size;
- ret->barrier_number=0;
+ ret->filename=hold_location->src->src_name->name;
+ ret->filename_size=hold_location->src->src_name->name_size;
+ ret->executed_macro_id=NULL;
Queue_Init(ret->ungeted_tokens);
return ret;
}
+ /*
+ Pops the innermost token context off the call stack, first re-enabling
+ macro expansion for the macro id (if any) whose replacement list this
+ context was iterating over.
+ Must not be called on a barrier-protected context (asserted).
+ */
+ void token_ptr_pop_context(struct Token_Pointer *ptr)
+ {
+ wonky_assert(ptr && ptr->call_stack && ptr->call_stack->size && ptr->call_stack->first->data);
+ wonky_assert(ptr->context && !ptr->context->barrier);
+
+ if(ptr->context->executed_macro_id)
+ ptr->context->executed_macro_id->was_already_expanded_as_a_macro=0;
+
+ delete_token_ptr_context(ptr->context);
+ ptr->context=Stack_Pop(ptr->call_stack);
+ }
_Bool token_ptr_has_remaining_tokens(struct Token_Pointer *ptr)
{
if(ptr->state==TOKEN_POINTER_STATE_ERROR)
}else
{
- while(ptr->context->number_of_remaining_tokens==0 &&
- ptr->context->barrier_number==0 &&
- ptr->call_stack->size>0)
- {
- delete_token_ptr_context(ptr->context);
- ptr->context=Stack_Pop(ptr->call_stack);
- }
- return ptr->context->number_of_remaining_tokens!=0;
+ while(!token_ptr_has_remaining_tokens_in_current_context(ptr)
+ && ptr->call_stack->size>0
+ && !ptr->context->barrier)
+ token_ptr_pop_context(ptr);
+ return token_ptr_has_remaining_tokens_in_current_context(ptr);
}
}
+ /*True when the current context still has unread tokens or pushed-back
+ ('ungeted') tokens; does not look past the current context.*/
+ _Bool token_ptr_has_remaining_tokens_in_current_context(struct Token_Pointer *ptr)
+ {
+ wonky_assert(ptr->context!=NULL);
+ return ptr->context->number_of_remaining_tokens!=0 || ptr->context->ungeted_tokens->size!=0;
+
+ }
void token_ptr_unget_token(struct Token_Pointer *ptr,struct token *token)
{
Queue_Push(ptr->context->ungeted_tokens,token);
else
return NULL;
}
- unsigned int token_ptr_get_current_barrier_number(struct Token_Pointer *ptr)
- {
- return ptr->context->barrier_number;
- }
- unsigned int token_ptr_set_barrier(struct Token_Pointer *ptr)
+ /*Marks the current context non-poppable; barriers must not nest (asserted).*/
+ void token_ptr_set_barrier(struct Token_Pointer *ptr)
{
- return ptr->context->barrier_number=ptr->next_barrier_number++;
+ wonky_assert(ptr && ptr->context && !ptr->context->barrier);
+ ptr->context->barrier=1;
}
+ /*Releases a barrier previously set on the current context (asserted set).*/
void token_ptr_clear_barrier(struct Token_Pointer *ptr)
{
- ptr->context->barrier_number=0;
+ wonky_assert(ptr && ptr->context && ptr->context->barrier);
+ ptr->context->barrier=0;
}
void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens)
{
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h --- a/src/semantics/program/translation_unit.h +++ b/src/semantics/program/translation_unit.h
size_t column;
char *filename;
size_t filename_size;
- unsigned int barrier_number;/*0 means no barrier*/
+
+ /*
+ Blocks popping of context. This is useful
+ when parsing expressions in preprocessing directives
+ and in similar line oriented situations.
+ */
+ _Bool barrier;
+
+ /*
+ Points to the macro id whose replacement token list this
+ context is iterating over; NULL otherwise.
+ */
+ struct identifier *executed_macro_id;
};
struct Token_Pointer
{
enum Token_Pointer_State state;
_Bool is_in_conditional_directive;/*TODO move this into the state*/
- size_t macro_expansion_number;
unsigned int next_barrier_number;/*NOTE(review): now dead — its only writer/reader was removed when barriers became a _Bool flag; this patch should delete it too*/
+
};
struct Preprocessing_Translation_Unit* get_preprocessing_translation_unit(struct Source_File *source);
void delete_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit);
+ void delete_preprocessing_translation_unit_but_not_the_tokens(struct Preprocessing_Translation_Unit *unit);
void push_token_into_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit,struct token *token);
struct token* token_ptr_get_token_under_pointer_in_preprocessing_directive(struct Token_Pointer *token_pointer);
struct Token_Pointer* get_token_ptr(struct Preprocessing_Translation_Unit *unit,struct Program *program);
void delete_token_ptr(struct Token_Pointer *ptr);
struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens);
+ void token_ptr_pop_context(struct Token_Pointer *ptr);
_Bool token_ptr_has_remaining_tokens(struct Token_Pointer *ptr);
+ _Bool token_ptr_has_remaining_tokens_in_current_context(struct Token_Pointer *ptr);
size_t token_ptr_get_functionlike_macro_number(struct Token_Pointer *ptr);
void token_ptr_unget_token(struct Token_Pointer *ptr,struct token *token);
-
-
void token_ptr_execute_include_directive(struct Token_Pointer *ptr,struct token_include_directive *include_directive);
void token_ptr_execute_if_directive(struct Token_Pointer *ptr,struct token_if_directive *if_directive);
void token_ptr_execute_ifdef_directive(struct Token_Pointer *ptr,struct token_ifdefndef_directive *ifdef_directive);
/*NOTE(review): stale — barriers are now a _Bool flag and token_ptr_get_current_barrier_number was removed from translation_unit.c by this patch; this orphaned declaration should be deleted here as well*/
unsigned int token_ptr_get_current_barrier_number(struct Token_Pointer *ptr);
- unsigned int token_ptr_set_barrier(struct Token_Pointer *ptr);
+ void token_ptr_set_barrier(struct Token_Pointer *ptr);
void token_ptr_clear_barrier(struct Token_Pointer *ptr);
void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens);
F diff --git a/src/semantics/value/value.c b/src/semantics/value/value.c --- a/src/semantics/value/value.c +++ b/src/semantics/value/value.c
case VALUE_VOID:
return 0;
}
+ #warning probably reaches here in a situation similar to int main() { return; } ??
wonky_assert(SHOULD_NOT_REACH_HERE);
}
F diff --git a/src/syntax/automatas/automata.c b/src/syntax/automatas/automata.c --- a/src/syntax/automatas/automata.c +++ b/src/syntax/automatas/automata.c
.data="defined",
.last_defined_macro_with_this_id=(struct token*)&(struct token_defined_unary_operator){.type=PKW_DEFINE},
.number_of_translation_unit_where_id_was_last_defined_as_a_macro=0,
- .macro_expansion_number=0,
+ .was_already_expanded_as_a_macro=0,
};
enum Source_Chars get_ch(const char *str,size_t limit)
{
F diff --git a/src/syntax/identifier/identifier.c b/src/syntax/identifier/identifier.c --- a/src/syntax/identifier/identifier.c +++ b/src/syntax/identifier/identifier.c
ret->data[size]='\0';
ret->last_defined_macro_with_this_id=NULL;
ret->number_of_translation_unit_where_id_was_last_defined_as_a_macro=0;
- ret->macro_expansion_number=0;
+ ret->was_already_expanded_as_a_macro=0;
ret->hold_functionlike_define_directive=NULL;
return ret;
}
- _Bool id_is_a_macro(struct identifier *id,size_t macro_expansion_number,size_t translation_unit_number)
+ _Bool id_is_a_macro(struct identifier *id,size_t translation_unit_number)
{
/*an id is an active macro when it was #defined in this translation unit
and is not currently being expanded (blocks recursive expansion)*/
return (id->number_of_translation_unit_where_id_was_last_defined_as_a_macro == translation_unit_number
&&
- id->macro_expansion_number!=macro_expansion_number);
+ !id->was_already_expanded_as_a_macro);
}
#endif
F diff --git a/src/syntax/identifier/identifier.h b/src/syntax/identifier/identifier.h --- a/src/syntax/identifier/identifier.h +++ b/src/syntax/identifier/identifier.h
struct functionlike_define_directive_argument *hold_functionlike_define_directive_argument;
size_t number_of_translation_unit_where_id_was_last_defined_as_a_macro;
- size_t macro_expansion_number;
+ /*use this to prevent bottomless recursion in macro expansion*/
+ _Bool was_already_expanded_as_a_macro;
};
struct identifier* get_identifier(char *data,size_t size);
- _Bool id_is_a_macro(struct identifier *id,size_t macro_expansion_number,size_t translation_unit_number);
+ _Bool id_is_a_macro(struct identifier *id,size_t translation_unit_number);
#endif
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c --- a/src/syntax/source_file.c +++ b/src/syntax/source_file.c
#include <source_file.h>
+ /*Name used by the sentinel 'start_of_file' location below.*/
+ static struct Source_Name start_of_file_name
+ =
+ {
+ .name="TODO startoffile",
+ .name_size=sizeof("TODO startoffile")
+ };
/*Sentinel location for the very beginning of a source file (zero-length range).*/
struct Source_Location start_of_file
=
{
.line=0,
.column=0,
- .on_which_byte=0,
- .src_name=&(struct Source_Name){.name="TODO startoffile",.name_size=sizeof("TODO startoffile")},
+ .starting_byte_index=0,
+ .length=0,
+ /*second designator was '.src_name=0' — a duplicate that silently overrode &start_of_file_name (C11 6.7.9p19: last initializer wins); '.src_size=0' is what was meant*/
+ .src=&(struct Source_File){.type=SOURCE_TEXT_PARTIAL_TEXT,.src_name=&start_of_file_name,.src="",.src_size=0},
};
struct Source_File* get_source_file_from_string(char *filename,size_t filename_size,struct Program *program)
{
ret->src_name=get_source_name("scratch pad");
ret->src=NULL;
ret->src_size=0;
- ret->canonic_name_size=0;
return ret;
}
- struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,struct Source_Name *src_name)
+ /*Allocates a location describing bytes [on_which_byte, on_which_byte+length) of src.*/
+ struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,size_t length,struct Source_File *src)
{
struct Source_Location *ret;
ret=wonky_malloc(sizeof(struct Source_Location));
ret->line=line;
ret->column=column;
- ret->on_which_byte=on_which_byte;
- ret->src_name=src_name;
+ ret->starting_byte_index=on_which_byte;
+ ret->length=length;
+ ret->src=src;
return ret;
}
struct Source_Location_Delta *ret;
wonky_assert(begining->line <= ending->line);
- wonky_assert(begining->on_which_byte <= ending->on_which_byte);
+ wonky_assert(begining->starting_byte_index <= ending->starting_byte_index);
ret=wonky_malloc(sizeof(struct Source_Location_Delta));
ret->line_offset=ending->line - begining->line;
F diff --git a/src/syntax/source_file.h b/src/syntax/source_file.h --- a/src/syntax/source_file.h +++ b/src/syntax/source_file.h
{
size_t line;
size_t column;
- size_t on_which_byte;
- struct Source_Name *src_name;
+ size_t starting_byte_index;
+ size_t length;
+ struct Source_File *src;
};
struct Source_Location_Delta
{
struct Source_Name *src_name;
char *src;
size_t src_size;
- size_t canonic_name_size;
};
struct Source_File* get_source_file_from_string(char *filename,size_t filename_size,struct Program *program);
struct Source_File* get_temp_source_file();
- struct Source_File* get_source_file_from_tokens(struct Token_Pointer *ptr);
- struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,struct Source_Name *src_name);
+ struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,size_t length,struct Source_File *src);
struct Source_Location_Delta* get_source_location_delta(struct Source_Location *begining,struct Source_Location *ending);
void source_file_expand(struct Source_File *src,size_t expand_byte_count);
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c --- a/src/syntax/token/token.c +++ b/src/syntax/token/token.c
ret->error=get_translation_message(
msg,
program,
- current_location->src_name->name,
- current_location->src_name->name_size,
+ current_location->src->src_name->name,
+ current_location->src->src_name->name_size,
current_location->line,
current_location->column,
args);
*size=sizeof("constant")-1;
}else
{
- /*hack.*/
+ #warning hack
ret=(char*)lexer_type_to_string_map[token->type];
if(ret==NULL) ret="";
*size=gstrnlen(ret,100);
return type==PKW_FILE_MACRO || type==PKW_DATE_MACRO || type==PKW_LINE_MACRO || type==PKW_STDC_MACRO || type==PKW_STDC_HOSTED_MACRO || type==PKW_STDC_VERSION_MACRO ||
type==PKW_TIME_MACRO;
}
+ /*True when token currently names a macro: an id or keyword #defined in this
+ translation unit (and not mid-expansion), or one of the special predefined
+ macros (__FILE__, __LINE__, ...).*/
- _Bool token_is_a_macro(struct token *token,size_t macro_expansion_number,size_t translation_unit_number)
+ _Bool token_is_a_macro(struct token *token,size_t translation_unit_number)
{
wonky_assert(token!=NULL);
if(token->type==KW_ID)
- return id_is_a_macro(((struct token_identifier*)token)->id,macro_expansion_number,translation_unit_number);
+ return id_is_a_macro(((struct token_identifier*)token)->id,translation_unit_number);
else if(token_is_keyword(token))
- return id_is_a_macro(((struct token_keyword*)token)->id,macro_expansion_number,translation_unit_number);
+ return id_is_a_macro(((struct token_keyword*)token)->id,translation_unit_number);
else
- return token_is_a_special_macro(token) || token_is_keyword(token);
+ /*token_is_keyword() is necessarily false on this branch, so the old
+ '|| token_is_keyword(token)' clause was dead code*/
+ return token_is_a_special_macro(token);
}
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h --- a/src/syntax/token/token.h +++ b/src/syntax/token/token.h
_Bool token_is_identifier_in_preprocessing(struct token *token);
_Bool token_is_a_special_macro(struct token *token);
- _Bool token_is_a_macro(struct token *token,size_t macro_expansion_number,size_t translation_unit_number);
+ _Bool token_is_a_macro(struct token *token,size_t translation_unit_number);