WONKY

F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c --- a/src/frontend/lex/lex_preprocessing_directive.c +++ b/src/frontend/lex/lex_preprocessing_directive.c
ret=preprocessing_extract_next_directive(lexer_data);
if(ret==NULL)
- return get_error_token("PREPROCESSING EMPTY DIRECTIVE NOT SUPPORTED",where,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("PREPROCESSING EMPTY DIRECTIVE NOT SUPPORTED",where,lexer_data->program);
return ret;
ret=wonky_malloc(sizeof(struct token_defined_unary_operator));
ret->type=PKW_DEFINED;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
hold_token=preprocessing_extract_next_token(lexer_data);
struct token *hold_token;
ret=wonky_malloc(sizeof(struct token_hashtag_unary_operator));
ret->type=PKW_HASHTAG_UNARY_OP;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
if(!preprocessing_eol(lexer_data))
hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
}while(preprocessing_get_and_check_token(lexer_data,KW_HASHTAG_HASHTAG) && !preprocessing_eol(lexer_data));
if(!preprocessing_eol(lexer_data))
Queue_Push(ret_q,previous_token);
- return (struct token*)get_hashtag_hashtag_operator(where,lexer_data->previous_token_location,ret_q);
+ return (struct token*)get_hashtag_hashtag_operator(where,ret_q);
}
}
struct token* preprocessing_extract_next_directive(struct Lexer_Data *lexer_data)
lexer_data->where_in_src-where_does_the_token_start_in_the_source_file,
lexer_data->src
),
- lexer_data->previous_token_location,
lexer_data->program);
ret=preprocessing_make_token_finishing_on_node(lexer_data, hold_node, where_does_the_token_start_in_the_source_file,extract_directive,extract_defined_statement);
return preprocessing_lex_if_directive(lexer_data,token_location,PKW_ELIF);
}
else
- return get_error_token("Stray #elif not participating in any #if",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("Stray #elif not participating in any #if",token_location,lexer_data->program);
case PKW_ELSE:
if(lexer_data->is_in_if_directive_body)
return preprocessing_return_else_token(lexer_data,token_location);
else
- return get_error_token("Stray #else not participating in any #if",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("Stray #else not participating in any #if",token_location,lexer_data->program);
case PKW_ENDIF:
if(lexer_data->is_in_if_directive_body)
return preprocessing_return_endif_token(lexer_data,token_location);
else
- return get_error_token("Stray #endif not participating in any #if",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("Stray #endif not participating in any #if",token_location,lexer_data->program);
case PKW_INCLUDE:
return preprocessing_lex_include_directive(lexer_data,token_location);
case PKW_DEFINE:
case PKW_ERROR:
return preprocessing_lex_error_directive(lexer_data,token_location);
case PKW_PRAGMA:
- return get_error_token("PREPROCESSING PRAGMA NOT DONE",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("PREPROCESSING PRAGMA NOT DONE",token_location,lexer_data->program);
default:
return lexer_make_token_finishing_on_node(lexer_data,finishing_node,start_position);
}
ret=wonky_malloc(sizeof(struct token_line_directive));
ret->type=PKW_LINE;
- ret->directive_delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
ret->filename=NULL;
ret=wonky_malloc(sizeof(struct token_error_directive));
ret->type=PKW_ERROR;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
hold_error_size=lexer_data->where_in_src-hold_start_location;
ret->error_message=get_wonky_message(WONKY_MESSAGE_TYPE_ERROR,WONKY_MESSAGE_SOURCE_TRANSLATION,where->line,where->column,where->src->src_name->name,where->src->src_name->name_size,"#error");
{
struct token_pragma_directive *ret;
ret->type=PKW_PRAGMA;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
ret->pragma_type=PRAGMA_TYPE_END;
preprocessing_goto_end_of_line(lexer_data);
return (struct token*)ret;
ret=wonky_malloc(sizeof(struct token_include_directive));
ret->type=PKW_INCLUDE;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
ret->tokens=wonky_malloc(sizeof(struct Queue));
Queue_Init(ret->tokens);
ret=wonky_malloc(sizeof(struct token_if_directive));
ret->type=if_type;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
ret->controlling_expression=wonky_malloc(sizeof(struct Queue));
ret->if_true=wonky_malloc(sizeof(struct Queue));
ret=wonky_malloc(sizeof(struct token_ifdefndef_directive));
ret->type=type;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
ret->if_defined=wonky_malloc(sizeof(struct Queue));
ret->if_undefined=wonky_malloc(sizeof(struct Queue));
preprocessing_goto_end_of_line(lexer_data);
delete_token((struct token*)hold_token);
/*NOTICE*/
- return get_error_token("Id exprected after #define directive",where,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("Id exprected after #define directive",where,lexer_data->program);
}else if(hold_token->type==KW_ID)
{
hold_id=((struct token_identifier*)hold_token)->id;
hold_hold_token=preprocessing_extract_next_token(lexer_data);
if(hold_hold_token==NULL)
{
- ret=get_normal_define_directive_token(where,lexer_data->previous_token_location,hold_id);
+ ret=get_normal_define_directive_token(where,hold_id);
delete_token((struct token*)hold_token);
return ret;
}else if(hold_hold_token->type==KW_OPEN_NORMAL)
struct token_normal_define_directive *ret;
- ret=(struct token_normal_define_directive*)get_normal_define_directive_token(where,lexer_data->previous_token_location,id);
+ ret=(struct token_normal_define_directive*)get_normal_define_directive_token(where,id);
if(first_replacement_token)
{
Queue_Push(ret->define->replacement_tokens,first_replacement_token);
{
struct token_functionlike_define_directive *ret;
- ret=(struct token_functionlike_define_directive*)get_functionlike_define_directive_token(where,lexer_data->previous_token_location,id);
+ ret=(struct token_functionlike_define_directive*)get_functionlike_define_directive_token(where,id);
preprocessing_parse_functionlike_macro_id_list(lexer_data,ret);
struct token *hold_token;
ret=wonky_malloc(sizeof(struct token_undef_directive));
ret->type=PKW_UNDEF;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->location=where;
ret->id=NULL;
hold_token=preprocessing_extract_next_token(lexer_data);
{
return get_functionlike_define_directive_argument_token(
where,
- lexer_data->previous_token_location,
((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive_argument);
}else if(token_is_keyword(hold_token) && ((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive==directive)
{
return get_functionlike_define_directive_argument_token(
where,
- lexer_data->previous_token_location,
((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive_argument);
}else if(hold_token->type==KW_HASHTAG)
{
struct token* preprocessing_return_else_token(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
static struct token error={.type=PKW_ELSE};
- error.delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ error.location=where;
return (struct token*)&error;
}
struct token* preprocessing_return_endif_token(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
static struct token endif={.type=PKW_ENDIF};
- endif.delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ endif.location=where;
return (struct token*)&endif;
}
#endif
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c --- a/src/frontend/lex/lexer.c +++ b/src/frontend/lex/lexer.c
ret->automata_view=AUTOMATA_VIEW_NORMAL;
ret->src=src;
ret->program=program;
- ret->previous_token_location=&start_of_file;
ret->buffer_token=NULL;
return ret;
lexer_data->where_in_src-where_does_the_token_start_in_the_source_file,
lexer_data->src
),
- lexer_data->previous_token_location,
lexer_data->program);
ret=lexer_make_token_finishing_on_node(lexer_data, hold_node, where_does_the_token_start_in_the_source_file);
lexer_data->is_in_the_begining_of_line=0;
- lexer_data->previous_token_location=ret->delta->location;
return ret;
}
case KW_HASHTAG_HASHTAG:
if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
{
- return get_small_token(KW_HASHTAG_HASHTAG,token_location,lexer_data->previous_token_location);
+ return get_small_token(KW_HASHTAG_HASHTAG,token_location);
}else
{
- return get_error_token("Ran into ## while not in a preprocessing directive. Invalid syntax.",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("Ran into ## while not in a preprocessing directive. Invalid syntax.",token_location,lexer_data->program);
}
break;
case KW_HASHTAG:
if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
{
- return get_small_token(KW_HASHTAG,token_location,lexer_data->previous_token_location);
+ return get_small_token(KW_HASHTAG,token_location);
}else
{
if(!lexer_data->is_in_the_begining_of_line)
{
- return get_error_token("# is not in the begining of a logical line",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("# is not in the begining of a logical line",token_location,lexer_data->program);
}else
{
return lex_preprocessing_directive(lexer_data,token_location);
id=get_identifier(lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
finishing_node->data=id;
}
- return get_id_token(finishing_node->data,token_location,lexer_data->previous_token_location);
+ return get_id_token(finishing_node->data,token_location);
break;
case KW_AUTO:
case KW_DO:
id=get_identifier(lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
finishing_node->data=id;
}
- return get_keyword_token(finishing_node->keyword,token_location,lexer_data->previous_token_location,finishing_node->data);
+ return get_keyword_token(finishing_node->keyword,token_location,finishing_node->data);
case KW_EXCLAMATION:
case KW_PERCENT:
case KW_OPEN_CURLY:
case KW_DIV_EQ:
case KW_FORWARD_SLASH:
- return get_punctuator_token(finishing_node->keyword,token_location,lexer_data->previous_token_location);
+ return get_punctuator_token(finishing_node->keyword,token_location);
case KW_HEXADECIMAL_CONSTANT:
case KW_DECIMAL_CONSTANT:
case KW_FLOAT_HEXADECIMAL_CONSTANT:
case KW_CHAR_CONSTANT:
case KW_WIDE_CHAR_CONSTANT:
- return get_constant_token(finishing_node->keyword,token_location,lexer_data->previous_token_location,lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
+ return get_constant_token(finishing_node->keyword,token_location,lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
case KW_STRING:
case KW_WIDE_STRING:
- return get_string_token(finishing_node->keyword,token_location,lexer_data->previous_token_location,lexer_data->src->src+start_position+1,lexer_data->where_in_src-start_position-2);
+ return get_string_token(finishing_node->keyword,token_location,lexer_data->src->src+start_position+1,lexer_data->where_in_src-start_position-2);
case PKW_FILE_MACRO:
- return get_file_macro_token(token_location,lexer_data->previous_token_location);
+ return get_file_macro_token(token_location);
case PKW_DATE_MACRO:
- return get_date_macro_token(token_location,lexer_data->previous_token_location);
+ return get_date_macro_token(token_location);
case PKW_LINE_MACRO:
- return get_line_macro_token(token_location,lexer_data->previous_token_location);
+ return get_line_macro_token(token_location);
case PKW_STDC_MACRO:
- return get_stdc_macro_token(token_location,lexer_data->previous_token_location);
+ return get_stdc_macro_token(token_location);
case PKW_STDC_HOSTED_MACRO:
- return get_stdc_hosted_macro_token(token_location,lexer_data->previous_token_location);
+ return get_stdc_hosted_macro_token(token_location);
case PKW_STDC_VERSION_MACRO:
- return get_stdc_version_macro_token(token_location,lexer_data->previous_token_location);
+ return get_stdc_version_macro_token(token_location);
case PKW_TIME_MACRO:
- return get_time_macro_token(token_location,lexer_data->previous_token_location);
+ return get_time_macro_token(token_location);
case PKW_IF:
case PKW_IFDEF:
case PKW_IFNDEF:
id=get_identifier(lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
finishing_node->data=id;
}
- return get_id_token(finishing_node->data,token_location,lexer_data->previous_token_location);
+ return get_id_token(finishing_node->data,token_location);
break;
case PKW_DEFINED:
if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
id=get_identifier(lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
finishing_node->data=id;
}
- return get_id_token(finishing_node->data,token_location,lexer_data->previous_token_location);
+ return get_id_token(finishing_node->data,token_location);
}
default:
- return get_error_token("Unexpected token",token_location,lexer_data->previous_token_location,lexer_data->program);
+ return get_error_token("Unexpected token",token_location,lexer_data->program);
}
wonky_assert(SHOULD_NOT_REACH_HERE);
F diff --git a/src/frontend/lex/lexer.h b/src/frontend/lex/lexer.h --- a/src/frontend/lex/lexer.h +++ b/src/frontend/lex/lexer.h
struct Source_File *src;
struct Program *program;
- struct Source_Location *previous_token_location;
-
struct token *buffer_token;
};
F diff --git a/src/frontend/parse/parse_declaration.c b/src/frontend/parse/parse_declaration.c --- a/src/frontend/parse/parse_declaration.c +++ b/src/frontend/parse/parse_declaration.c
while(!get_and_check(translation_data,KW_SEMICOLON))
{
hold=parse_declarator(translation_data,scope,prototype);
- if(hold->denotation==DT_Function)
+ if(has_new_errors(translation_data))
+ {
+ chase_next_semicolumn(translation_data);
+ }
+ else if(hold->denotation==DT_Function)
{
//Scope_Push(scope,hold,translation_data);
/*check if this is a function definition*/
{
/*TODO error*/
push_generic_error(translation_data->program,"enum definition error, expected an id");
- Queue_Push(enumeration->consts,get_denoted_error(NULL));
+ //Queue_Push(enumeration->consts,get_denoted_error(NULL));
return ;
}
}while(!get_and_check(translation_data,KW_CLOSE_CURLY));
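The parse_declaration.c hunk above switches to a panic-mode style of recovery: when the declarator produced new errors, the parser skips ahead (here via chase_next_semicolumn) instead of acting on a broken declarator. Below is a minimal stand-alone sketch of that recovery idea, with toy types that are not the project's real parser structures; it only illustrates skipping to the next ';' so parsing can resume at the following declaration.

/* Minimal stand-alone sketch of panic-mode recovery after a bad declarator:
 * on error, consume tokens up to and including the next ';' so parsing can
 * resume at the next declaration.  Toy types only, not Wonky's parser. */
#include <stdio.h>

struct toy_stream { const char *tokens; size_t pos; };

static char peek(struct toy_stream *s) { return s->tokens[s->pos]; }
static char next(struct toy_stream *s) { return s->tokens[s->pos++]; }

static void chase_next_semicolon(struct toy_stream *s)
{
	while (peek(s) != '\0' && next(s) != ';')
		; /* discard tokens until the broken declaration is over */
}

int main(void)
{
	/* the second 'x' stands in for a token that made the declarator fail */
	struct toy_stream s = { "int x x;float y;", 0 };

	chase_next_semicolon(&s);                    /* give up on the first declaration */
	printf("resume at: %s\n", s.tokens + s.pos); /* -> "float y;" */
	return 0;
}
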
F diff --git a/src/misc/print.c b/src/misc/print.c --- a/src/misc/print.c +++ b/src/misc/print.c
void print_token(struct wonky_stream *out,struct token *token)
{
- wonky_fprintf(out,"[TOKEN %WSl: ",token->delta->location);
+ wonky_fprintf(out,"[TOKEN %WSl: ",token->location);
print_token_text(out,token);
wonky_fprintf(out," | kw=");
print_keyword_enum(out,token->type);
void print_raw_token_text(struct wonky_stream *out,struct token *token)
{
wonky_write(out,
- token->delta->location->src->src
+ token->location->src->src
+
- token->delta->location->starting_byte_index
+ token->location->starting_byte_index
,
- token->delta->location->length);
+ token->location->length);
}
#endif
F diff --git a/src/misc/wonky_stream.c b/src/misc/wonky_stream.c --- a/src/misc/wonky_stream.c +++ b/src/misc/wonky_stream.c
{
if(print_line)
wonky_write(s,"\n",1);
- wonky_fprintf(s,"\tExpanded from %WI %WSl",hold->executed_macro_id,hold->executed_macro_id->last_defined_macro_with_this_id->delta->location);
+ wonky_fprintf(s,"\tExpanded from %WI %WSl",hold->executed_macro_id,hold->executed_macro_id->last_defined_macro_with_this_id->location);
print_line=1;
}
case WONKY__CONVERSION_WONKY_TOKEN_LINE:
{
struct token *token=va_arg(args,struct token*);
- wonky_fprintf(s,"%Wt",token);
- size_t begining_of_real_line=token->delta->location->starting_byte_index;
- size_t ending_of_real_line=token->delta->location->starting_byte_index;
- for(begining_of_real_line ;begining_of_real_line && token->delta->location->src->src[begining_of_real_line]!='\n' ;--begining_of_real_line);
- for(ending_of_real_line ;ending_of_real_line<token->delta->location->src->src_size && token->delta->location->src->src[ending_of_real_line]!='\n' ;++ending_of_real_line);
+ size_t begining_of_real_line=token->location->starting_byte_index;
+ size_t ending_of_real_line=token->location->starting_byte_index;
+ for(begining_of_real_line ;begining_of_real_line && token->location->src->src[begining_of_real_line]!='\n' ;--begining_of_real_line);
+ for(ending_of_real_line ;ending_of_real_line<token->location->src->src_size && token->location->src->src[ending_of_real_line]!='\n' ;++ending_of_real_line);
begining_of_real_line+=!!(begining_of_real_line);
wonky_assert(begining_of_real_line<ending_of_real_line);
- wonky_write(s,token->delta->location->src->src+begining_of_real_line,ending_of_real_line-begining_of_real_line);
+ wonky_write(s,token->location->src->src+begining_of_real_line,ending_of_real_line-begining_of_real_line);
}
break;
case WONKY__CONVERSION_WONKY_SOURCE_LOCATION:
F diff --git a/src/semantics/ast.c b/src/semantics/ast.c --- a/src/semantics/ast.c +++ b/src/semantics/ast.c
hold_denoted=check_ordinary(scope,id->id);
if(hold_denoted==NULL)
{
- push_translation_error("using undeclared id - %WI\n%WSl \"%Wtl\"",translation_data,id->id,id->delta->location,id);
+ push_translation_error("using undeclared id - %WI\n%WSl \"%Wtl\"",translation_data,id->id,id->location,id);
wonky_free(ret);
return (struct AST_Expression*)get_error_tree(NULL);
}else
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c --- a/src/semantics/program/translation_unit.c +++ b/src/semantics/program/translation_unit.c
}
void token_ptr_execute_line_directive(struct Token_Pointer *ptr,struct token_line_directive *line_directive)
{
- if(line_directive->filename!=NULL)
- {
- ptr->context->filename=line_directive->filename;
- ptr->context->filename_size=line_directive->filename_size;
- }
-
- ptr->context->line=line_directive->line-2;
- ptr->context->column=0;
-
+ #warning implement line directive
token_ptr_goto_next_token(ptr);
}
void token_ptr_execute_error_directive(struct Token_Pointer *ptr,struct token_error_directive *error_directive)
{
line=(struct token_constant*)get_constant_token(
KW_DECIMAL_CONSTANT,
- operator->delta->location,
- operator->delta->location,
+ operator->location,
"1",
1);
}else
{
line=(struct token_constant*)get_constant_token(
KW_DECIMAL_CONSTANT,
- operator->delta->location,
- operator->delta->location,
+ operator->location,
"0",
1);
}
- Queue_Push(ptr->context->ungeted_tokens,line);
+ token_ptr_unget_token(ptr,(struct token*)line);
}
void token_ptr_execute_file_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
token_ptr_goto_next_token(ptr);
file=(struct token_string*)get_string_token(
KW_STRING,
- directive->delta->location,
- directive->delta->location,
+ directive->location,
ptr->context->filename,
ptr->context->filename_size);
- Queue_Push(ptr->context->ungeted_tokens,file);
+ token_ptr_unget_token(ptr,(struct token*)file);
}
void token_ptr_execute_line_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
line=(struct token_constant*)get_constant_long_long_int_token(
- directive->delta->location,
- directive->delta->location,
+ directive->location,
ptr->context->line+1);
- Queue_Push(ptr->context->ungeted_tokens,line);
+ token_ptr_unget_token(ptr,(struct token*)line);
}
void token_ptr_execute_stdc_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
- struct token_constant *line;
+ struct token_constant *stdc;
token_ptr_goto_next_token(ptr);
- line=(struct token_constant*)get_constant_token(
+ stdc=(struct token_constant*)get_constant_token(
KW_DECIMAL_CONSTANT,
- directive->delta->location,
- directive->delta->location,
+ directive->location,
"1",
1);
- Queue_Push(ptr->context->ungeted_tokens,line);
+ token_ptr_unget_token(ptr,(struct token*)stdc);
}
void token_ptr_execute_macro(struct Token_Pointer *ptr,struct identifier *id)
{
hold_string_token=(struct token_string*)
get_token_from_two_strings_with_a_space_between(
hold_string_token,
- (struct token_string*)get_string_token(KW_STRING,hold_token->delta->location,hold_token->delta->location,hold_str,hold_str_size)
+ (struct token_string*)get_string_token(KW_STRING,hold_token->location,hold_str,hold_str_size)
);
*/
}
- hold_string_token=(struct token_string*)get_string_token(KW_STRING,op->delta->location,op->delta->location,hold_text.cs,wonky_string_number_of_bytes(hold_text));
- Queue_Push(ptr->context->ungeted_tokens,hold_string_token);
+ hold_string_token=(struct token_string*)get_string_token(KW_STRING,op->location,hold_text.cs,wonky_string_number_of_bytes(hold_text));
+ token_ptr_unget_token(ptr,(struct token*)hold_string_token);
wonky_string_stream_delete(&text_stream);
wonky_string_delete(&hold_text);
struct token *ret;
size_t left_size,right_size;
- left_size=left->delta->location->length;
- right_size=right->delta->location->length;
+ left_size=left->location->length;
+ right_size=right->location->length;
if(left_size==0)
return right;
if(right_size==0)
temp_source_file->src_size=left_size+right_size;
gmemmove(temp_source_file->src,
- left->delta->location->src->src+left->delta->location->starting_byte_index,
+ left->location->src->src+left->location->starting_byte_index,
left_size);
gmemmove(temp_source_file->src+left_size,
- right->delta->location->src->src+right->delta->location->starting_byte_index,
+ right->location->src->src+right->location->starting_byte_index,
right_size);
temp_lexer_data=get_lexer_data(temp_source_file,program);
for(i=1,it=arg->argument->first_in_argument_substitution_tokens;i<arg->argument->number_of_substitution_tokens;
++i,it=it->prev)
{
- Queue_Push(ptr->context->ungeted_tokens,it->data);
+ token_ptr_unget_token(ptr,(struct token*)it->data);
}
hold_left=(struct token*)it->data;
}
hold_right=(struct token*)arg->argument->first_in_argument_substitution_tokens->data;
hold_left=token_ptr_execute_concat_two_tokens(hold_left,hold_right,ptr->program);
- Queue_Push(ptr->context->ungeted_tokens,hold_left);
+ token_ptr_unget_token(ptr,(struct token*)hold_left);
for(i=1,it=arg->argument->first_in_argument_substitution_tokens->prev;
i<arg->argument->number_of_substitution_tokens;
++i,it=it->prev)
{
- Queue_Push(ptr->context->ungeted_tokens,it->data);
+ token_ptr_unget_token(ptr,(struct token*)it->data);
}
hold_left=(struct token*)it->data;
}else if(arg->argument->number_of_substitution_tokens==1)
hold_left=token_ptr_execute_concat_two_tokens(hold_left,hold_right,ptr->program);
}
}
- Queue_Push(ptr->context->ungeted_tokens,hold_left);
+ token_ptr_unget_token(ptr,(struct token*)hold_left);
token_ptr_clear_barrier(ptr);
}
return 1;
case KW_ID:
{
+ wonky_printf("hitting id token %Wt",token);
struct token_identifier *hold_id_token;
hold_id_token=(struct token_identifier*)token;
if( id_is_a_macro(
return 1;
}else if(token_pointer->is_in_conditional_directive)
{
+ wonky_printf("id token %Wt is transformed into a '0'\n!",token);
token_ptr_goto_next_token(token_pointer);
/*all undefined id tokens in control expression are replaced with 0*/
- Queue_Push(token_pointer->context->ungeted_tokens,get_constant_token(
+ token_ptr_unget_token(token_pointer,(struct token*)get_constant_token(
KW_DECIMAL_CONSTANT,
- hold_id_token->delta->location,
- hold_id_token->delta->location,
+ hold_id_token->location,
"0",
1));
return 0;
token_ptr_goto_next_token(token_pointer);
/*all undefined id tokens in control expression are replaced with 0*/
- Queue_Push(token_pointer->context->ungeted_tokens,get_constant_token(
+ token_ptr_unget_token(token_pointer,(struct token*)get_constant_token(
KW_DECIMAL_CONSTANT,
- hold_kw_token->delta->location,
- hold_kw_token->delta->location,
+ hold_kw_token->location,
"0",
1));
return 0;
}
void token_ptr_execute_stdc_hosted_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
- struct token_constant *line;
+ struct token_constant *stdc;
token_ptr_goto_next_token(ptr);
- line=(struct token_constant*)get_constant_token(
+ stdc=(struct token_constant*)get_constant_token(
KW_DECIMAL_CONSTANT,
- directive->delta->location,
- directive->delta->location,
+ directive->location,
"1",
1);
- Queue_Push(ptr->context->ungeted_tokens,line);
+ token_ptr_unget_token(ptr,(struct token*)stdc);
}
void token_ptr_execute_stdc_version_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
- struct token_constant *line;
+ struct token_constant *stdc;
token_ptr_goto_next_token(ptr);
- line=(struct token_constant*)get_constant_token(
+ stdc=(struct token_constant*)get_constant_token(
KW_LONG_DECIMAL_CONSTANT,
- directive->delta->location,
- directive->delta->location,
+ directive->location,
"199901",
sizeof("199901")-1);
- Queue_Push(ptr->context->ungeted_tokens,line);
+ token_ptr_unget_token(ptr,(struct token*)stdc);
}
void delete_token_ptr(struct Token_Pointer *ptr)
{
ret->number_of_remaining_tokens=number_of_remaining_tokens;
ret->ungeted_tokens=wonky_malloc(sizeof(struct Queue));
- hold_location=((struct token*)start->data)->delta->location;
+ hold_location=((struct token*)start->data)->location;
//ret->line=hold_location->line;
//ret->column=hold_location->column;
{
wonky_assert(ptr->context!=NULL);
return ptr->context->number_of_remaining_tokens!=0 || ptr->context->ungeted_tokens->size!=0;
-
}
void token_ptr_unget_token(struct Token_Pointer *ptr,struct token *token)
{
+ wonky_printf("pushing token into ungetted tokens %Wt",token);
Queue_Push(ptr->context->ungeted_tokens,token);
}
void token_ptr_assume_location_of_token(struct Token_Pointer *ptr,struct token *token)
{
wonky_assert(token!=NULL);
- ptr->context->line+=token->delta->line_offset;
- ptr->context->column=token->delta->column;
+ ptr->context->line=token->location->line;
+ ptr->context->column=token->location->column;
}
_Bool token_ptr_has_buffered_tokens(struct Token_Pointer *ptr)
{
struct token* token_ptr_get_buffered_token(struct Token_Pointer *ptr)
{
if(token_ptr_has_buffered_tokens(ptr))
- return (struct token*)Queue_Pop(ptr->context->ungeted_tokens);
+ {
+ struct token *hold_token;
+ hold_token=Queue_Pop(ptr->context->ungeted_tokens);
+
+ return hold_token;
+ }
else
return NULL;
}
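Throughout the translation_unit.c changes above, direct Queue_Push calls on ptr->context->ungeted_tokens are replaced by the token_ptr_unget_token wrapper. Below is a minimal stand-alone sketch of that pattern; the toy types are illustrative stand-ins, not Wonky's real Token_Pointer or Queue, and only show why funnelling every push through one function keeps tracing and sanity checks in a single place.

/* Stand-alone sketch (simplified stand-in types) of the "unget through one
 * wrapper" pattern: every caller goes through unget_token(), so the trace
 * print and the invariant check live at exactly one call site. */
#include <stdio.h>
#include <assert.h>

struct toy_token { int type; const char *text; struct toy_token *next; };

struct toy_token_ptr
{
	struct toy_token *ungot; /* head of the buffered-token list */
};

static void unget_token(struct toy_token_ptr *ptr, struct toy_token *tok)
{
	assert(tok != NULL);                       /* central invariant check */
	fprintf(stderr, "unget: %s\n", tok->text); /* central trace point     */
	tok->next = ptr->ungot;
	ptr->ungot = tok;
}

static struct toy_token *get_buffered_token(struct toy_token_ptr *ptr)
{
	struct toy_token *tok = ptr->ungot;
	if (tok != NULL)
		ptr->ungot = tok->next;
	return tok; /* NULL when nothing is buffered */
}

int main(void)
{
	struct toy_token_ptr ptr = { NULL };
	struct toy_token one = { 1, "1", NULL };

	unget_token(&ptr, &one); /* e.g. the "1" produced for a special macro */
	printf("%s\n", get_buffered_token(&ptr)->text);
	return 0;
}
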
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h --- a/src/semantics/program/translation_unit.h +++ b/src/semantics/program/translation_unit.h
void token_ptr_load_functionlike_macro_state_from_context(struct Token_Pointer_Context *ctx);
-
void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op);
void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_operator *op);
F diff --git a/src/semantics/value/type.c b/src/semantics/value/type.c --- a/src/semantics/value/type.c +++ b/src/semantics/value/type.c
}
_Bool types_are_identical(struct Type *a,struct Type *b)
{
+ wonky_assert(a!=NULL && b!=NULL);
if(a->specifier!=b->specifier)
return 0;
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c --- a/src/syntax/source_file.c +++ b/src/syntax/source_file.c
return ret;
}
- struct Source_Location_Delta* get_source_location_delta(struct Source_Location *begining,struct Source_Location *ending)
- {
- struct Source_Location_Delta *ret;
-
- wonky_assert(begining->line <= ending->line);
- wonky_assert(begining->starting_byte_index <= ending->starting_byte_index);
-
- ret=wonky_malloc(sizeof(struct Source_Location_Delta));
- ret->line_offset=ending->line - begining->line;
- ret->column=ending->column;
- ret->location=ending;
-
- return ret;
- }
void source_file_expand(struct Source_File *src,size_t expand_byte_count)
{
if(src->src)
F diff --git a/src/syntax/source_file.h b/src/syntax/source_file.h --- a/src/syntax/source_file.h +++ b/src/syntax/source_file.h
size_t length;
struct Source_File *src;
};
- struct Source_Location_Delta
- {
- size_t line_offset;
- size_t column; /*this is a copy from end_location*/
- struct Source_Location *location;
- };
struct Source_File
{
enum Source_Text_Type type;
struct Source_File* get_source_file_from_string(char *filename,size_t filename_size,struct Program *program);
struct Source_File* get_temp_source_file();
struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,size_t length,struct Source_File *src);
- struct Source_Location_Delta* get_source_location_delta(struct Source_Location *begining,struct Source_Location *ending);
void source_file_expand(struct Source_File *src,size_t expand_byte_count);
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c --- a/src/syntax/token/token.c +++ b/src/syntax/token/token.c
#define WONKY_TOKEN_C WONKY_TOKEN_C
#include <token.h>
- struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location)
{
struct token_identifier *ret;
ret=wonky_malloc(sizeof(struct token_identifier));
ret->type=type;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
return (struct token*)ret;
}
- struct token* get_id_token(struct identifier *id,struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_id_token(struct identifier *id,struct Source_Location *current_location)
{
struct token_identifier *ret;
ret=wonky_malloc(sizeof(struct token_identifier));
ret->type=KW_ID;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->id=id;
return (struct token*)ret;
}
- struct token* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id)
+ struct token* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct identifier *id)
{
struct token_keyword *ret;
ret=wonky_malloc(sizeof(struct token_keyword));
ret->type=type;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->id=id;
return (struct token*)ret;
}
- struct token* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *current_location)
{
struct token_punctuator *ret;
ret=wonky_malloc(sizeof(struct token_punctuator));
ret->type=type;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->punctuator_type=PUNCTUATOR_NORMAL;/*TODO*/
return (struct token*)ret;
}
- struct token* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,struct Source_Location *previous_location,char *data,size_t size)
+ struct token* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,char *data,size_t size)
{
struct token_constant *ret;
ret=wonky_malloc(sizeof(struct token_constant));
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->type=KW_CONSTANT;
switch(bare_type)
return (struct token*)ret;
}
- struct token* get_constant_long_long_int_token(struct Source_Location *current_location,struct Source_Location *previous_location,long long int number)
+ struct token* get_constant_long_long_int_token(struct Source_Location *current_location,long long int number)
{
struct token_constant *ret;
ret=wonky_malloc(sizeof(struct token_constant));
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->type=KW_CONSTANT;
ret->constant=get_long_long_int_constant(number);
return (struct token*)ret;
}
- struct token* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,struct Source_Location *previous_location,char *data,size_t size)
+ struct token* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,char *data,size_t size)
{
struct token_string *ret;
ret=wonky_malloc(sizeof(struct token_string));
ret->type=bare_type;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
if(bare_type==KW_STRING)
{
return (struct token*)ret;
}
- struct token* get_include_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *tokens)
+ struct token* get_include_directive_token(struct Source_Location *current_location,struct Queue *tokens)
{
struct token_include_directive *ret;
ret=wonky_malloc(sizeof(struct token_string));
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->tokens=tokens;
ret->type=PKW_INCLUDE;
return (struct token*)ret;
}
- struct token* get_hashtag_hashtag_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *operands)
+ struct token* get_hashtag_hashtag_operator(struct Source_Location *current_location,struct Queue *operands)
{
struct token_hashtag_hastag_operator *ret;
ret=wonky_malloc(sizeof(struct token_hashtag_hastag_operator));
ret->type=PKW_HASHTAG_HASHTAG_OP;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->operands=operands;
return (struct token*)ret;
}
- struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Source_Location *previous_location,struct Program *program,...)
+ struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Program *program,...)
{
struct token_error *ret;
va_list args;
ret=wonky_malloc(sizeof(struct token_error));
ret->type=LT_ERROR;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->error=get_wonky_message(WONKY_MESSAGE_TYPE_ERROR,WONKY_MESSAGE_SOURCE_PREPROCESSING,current_location->line,current_location->column,current_location->src->src_name->name,current_location->src->src_name->name_size,"%s",hold_err.cs);
return (struct token*)ret;
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=LT_EOF;
- ret->delta=wonky_malloc(sizeof(struct Source_Location_Delta));
- ret->delta->line_offset=0;
- ret->delta->column=0;
- ret->delta->location=&start_of_file;
+ ret->location=&start_of_file;
return ret;
}
struct token_string *ret;
ret=wonky_malloc(sizeof(struct token_string));
ret->type=KW_STRING;
- ret->delta=first->delta;
+ ret->location=first->location;
ret->constant=concatenate_string_literals(first->constant,second->constant);
return (struct token*)ret;
struct token_string *ret;
ret=wonky_malloc(sizeof(struct token_string));
ret->type=KW_STRING;
- ret->delta=first->delta;
+ ret->location=first->location;
ret->constant=concatenate_string_literals_with_space_between(first->constant,second->constant);
return (struct token*)ret;
}
- struct token* get_file_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_file_macro_token(struct Source_Location *current_location)
{
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=PKW_FILE_MACRO;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
return ret;
}
1900 + timeptr->tm_year);
return result;
}
- struct token* get_date_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_date_macro_token(struct Source_Location *current_location)
{
struct tm *tm;
time_t t;
ret=ascdate(tm);
size=gstrnlen(ret,27);
- return (struct token*)get_string_token(KW_STRING,current_location,previous_location,ret,size);
+ return (struct token*)get_string_token(KW_STRING,current_location,ret,size);
error:
return (struct token*)get_string_token(
KW_STRING,
current_location,
- current_location,
"Time could not be determined",
sizeof("Time could not be determined"));
}
- struct token* get_line_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_line_macro_token(struct Source_Location *current_location)
{
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=PKW_LINE_MACRO;
- ret->delta=get_source_location_delta(previous_location,current_location);;
+ ret->location=current_location;
return ret;
}
- struct token* get_stdc_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_stdc_macro_token(struct Source_Location *current_location)
{
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=PKW_STDC_MACRO;
- ret->delta=get_source_location_delta(previous_location,current_location);;
+ ret->location=current_location;
return ret;
}
- struct token* get_stdc_hosted_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_stdc_hosted_macro_token(struct Source_Location *current_location)
{
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=PKW_STDC_HOSTED_MACRO;
- ret->delta=get_source_location_delta(previous_location,current_location);;
+ ret->location=current_location;
return ret;
}
- struct token* get_stdc_version_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_stdc_version_macro_token(struct Source_Location *current_location)
{
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=PKW_STDC_VERSION_MACRO;
- ret->delta=get_source_location_delta(previous_location,current_location);;
+ ret->location=current_location;
return ret;
}
);
return result;
}
- struct token* get_time_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
+ struct token* get_time_macro_token(struct Source_Location *current_location)
{
struct tm *tm;
time_t t;
return (struct token*)get_string_token(
KW_STRING,
current_location,
- previous_location,
ret,size);
error:
- return (struct token*)get_string_token(KW_STRING,current_location,current_location,"Time could not be determined",sizeof("Time could not be determined"));
+ return (struct token*)get_string_token(KW_STRING,current_location,"Time could not be determined",sizeof("Time could not be determined"));
}
char* get_string_from_token(struct token* token,size_t *size)
{
}
}
- struct token* get_normal_define_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id)
+ struct token* get_normal_define_directive_token(struct Source_Location *current_location,struct identifier *id)
{
struct token_normal_define_directive *ret;
ret=wonky_malloc(sizeof(struct token_normal_define_directive));
ret->type=PKW_DEFINE;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->define=get_normal_define_directive(id);
return (struct token*)ret;
}
- struct token* get_functionlike_define_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id)
+ struct token* get_functionlike_define_directive_token(struct Source_Location *current_location,struct identifier *id)
{
struct token_functionlike_define_directive *ret;
ret->type=PKW_FUNCTIONLIKE_DEFINE;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->define=get_functionlike_define_directive(id);
return (struct token*)ret;
}
- struct token* get_functionlike_define_directive_argument_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct functionlike_define_directive_argument *argument)
+ struct token* get_functionlike_define_directive_argument_token(struct Source_Location *current_location,struct functionlike_define_directive_argument *argument)
{
struct token_functionlike_define_argument *ret;
ret=wonky_malloc(sizeof(struct token_functionlike_define_argument));
ret->type=PKW_MACRO_ARGUMENT;
- ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->location=current_location;
ret->argument=argument;
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h --- a/src/syntax/token/token.h +++ b/src/syntax/token/token.h
struct token
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
};
struct token_identifier
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct identifier *id;
};
struct token_keyword
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct identifier *id;
};
struct token_punctuator
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
enum Punctuator_Token_Type punctuator_type;
};
struct token_constant
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Constant *constant;
};
struct token_string
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Constant *constant;
};
struct token_include_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Queue *tokens;
};
struct token_if_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Queue *controlling_expression;
struct Queue *if_true;
struct Queue *if_false;
struct token_ifdefndef_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct token *id; /*keywords are also identifiers in preprocessing*/
struct Queue *if_defined;
struct Queue *if_undefined;
struct token_normal_define_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct normal_define_directive *define;
};
struct token_functionlike_define_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct functionlike_define_directive *define;
};
struct token_functionlike_define_argument
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct functionlike_define_directive_argument *argument;
};
struct token_undef_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct token *id; /*keywords are also identifiers in preprocessing*/
};
struct token_line_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *directive_delta;
+ struct Source_Location *location;
size_t line;
char *filename;
struct token_error_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Wonky_Message *error_message;
};
struct token_pragma_directive
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
enum Pragma_Type pragma_type;
};
struct token_defined_unary_operator
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct token *id;
};
struct token_hashtag_unary_operator
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct token_functionlike_define_argument *operand;
};
struct token_hashtag_hastag_operator
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Queue *operands; /*queue of id and/or functionlike macro argument tokens*/
};
struct token_error
{
enum LEXER_TYPE type;
- struct Source_Location_Delta *delta;
+ struct Source_Location *location;
struct Wonky_Message *error;
};
void handle_splicing(struct token *word);
char compare_tokens(struct token *a,struct token *b);
- struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location);
+ struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location);
- struct token* get_id_token(struct identifier *id,struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
- struct token* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,struct Source_Location *previous_location,char *data,size_t size);
- struct token* get_constant_long_long_int_token(struct Source_Location *current_location,struct Source_Location *previous_location,long long int number);
- struct token* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,struct Source_Location *previous_location,char *data,size_t size);
- struct token* get_include_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *tokens);
- struct token* get_if_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
+ struct token* get_id_token(struct identifier *id,struct Source_Location *current_location);
+ struct token* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct identifier *id);
+ struct token* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *current_location);
+ struct token* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,char *data,size_t size);
+ struct token* get_constant_long_long_int_token(struct Source_Location *current_location,long long int number);
+ struct token* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *current_location,char *data,size_t size);
+ struct token* get_include_directive_token(struct Source_Location *current_location,struct Queue *tokens);
+ struct token* get_if_directive_token(struct Source_Location *current_location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
- struct token* get_ifdef_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
+ struct token* get_ifdef_directive_token(struct Source_Location *current_location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
- struct token* get_ifdef_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
+ struct token* get_ifdef_directive_token(struct Source_Location *current_location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
- struct token* get_normal_define_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
+ struct token* get_normal_define_directive_token(struct Source_Location *current_location,struct identifier *id);
- struct token* get_functionlike_define_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
- struct token* get_functionlike_define_directive_argument_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct functionlike_define_directive_argument *argument);
+ struct token* get_functionlike_define_directive_token(struct Source_Location *current_location,struct identifier *id);
+ struct token* get_functionlike_define_directive_argument_token(struct Source_Location *current_location,struct functionlike_define_directive_argument *argument);
- struct token* get_undef_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
- struct token* get_line_directive_token(struct Source_Location *current_location,struct Source_Location *new_location,struct Source_Location *previous_location);
- struct token* get_error_directive_token(struct Source_Location *current_location,struct Source_Location *previous_location,struct token_string *error_message);
- struct token* get_pragma_directive(struct Source_Location *current_location,struct Source_Location *previous_location,enum Pragma_Type type);
- struct token* get_defined_unary_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
- struct token* get_hashtag_unary_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct token_functionlike_define_directive *operand);
- struct token* get_hashtag_hashtag_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *operands);
- struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Source_Location *previous_location,struct Program *program,...);
+ struct token* get_undef_directive_token(struct Source_Location *current_location,struct identifier *id);
+ struct token* get_line_directive_token(struct Source_Location *current_location,struct Source_Location *new_location);
+ struct token* get_error_directive_token(struct Source_Location *current_location,struct token_string *error_message);
+ struct token* get_pragma_directive(struct Source_Location *current_location,enum Pragma_Type type);
+ struct token* get_defined_unary_operator(struct Source_Location *current_location,struct identifier *id);
+ struct token* get_hashtag_unary_operator(struct Source_Location *current_location,struct token_functionlike_define_directive *operand);
+ struct token* get_hashtag_hashtag_operator(struct Source_Location *current_location,struct Queue *operands);
+ struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Program *program,...);
struct token* get_eof_token();
struct token* get_token_from_two_adjacent_strings(struct token_string *first,struct token_string *second);
struct token* get_token_from_two_strings_with_a_space_between(struct token_string *first,struct token_string *second);
- struct token* get_file_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_date_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_line_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_stdc_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_stdc_hosted_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_stdc_version_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- struct token* get_time_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
+ struct token* get_file_macro_token(struct Source_Location *current_location);
+ struct token* get_date_macro_token(struct Source_Location *current_location);
+ struct token* get_line_macro_token(struct Source_Location *current_location);
+ struct token* get_stdc_macro_token(struct Source_Location *current_location);
+ struct token* get_stdc_hosted_macro_token(struct Source_Location *current_location);
+ struct token* get_stdc_version_macro_token(struct Source_Location *current_location);
+ struct token* get_time_macro_token(struct Source_Location *current_location);
char* get_string_from_token(struct token* token,size_t *size);
_Bool token_is_keyword(struct token *token);
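The net effect of the token.h and token.c changes is that every token now carries a direct struct Source_Location *location instead of a Source_Location_Delta computed against the previous token, and the previous_location parameter disappears from every constructor. A small illustrative sketch of the resulting one-dereference access pattern follows; the struct layouts are stand-ins (field names taken from the diff), not the project's actual headers.

#include <stdio.h>

/* Stand-in shapes: field names follow the diff, everything else is illustrative. */
struct source_location { size_t line, column, starting_byte_index, length; };
struct toy_token       { int type; struct source_location *location; };

/* Before the change a diagnostic had to hop through token->delta->location;
 * now the location is one dereference away. */
static void report(const struct toy_token *tok, const char *msg)
{
	printf("%zu:%zu: %s\n", tok->location->line, tok->location->column, msg);
}

int main(void)
{
	struct source_location loc = { 42, 7, 0, 1 };
	struct toy_token tok = { 0, &loc };
	report(&tok, "example diagnostic");
	return 0;
}
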