WONKY



LOG | FILES | OVERVIEW


F diff --git a/src/environment/error/gcc_error.c b/src/environment/error/gcc_error.c --- a/src/environment/error/gcc_error.c +++ b/src/environment/error/gcc_error.c
line,column,filename,filename_size,fmt,args);
s=wonky_string_stream(hold->message);
wonky_fseek(&s,0,SEEK_END);
+ wonky_fprintf(&s,"\n\t> %WPl%WIC",translation_data->token_pointer,translation_data->token_pointer);
+ Queue_Push(translation_data->program->errors,hold);
+ wonky_string_stream_delete(&s);
+ }
+ #warning add a semicolon-specific error function to handle a newline encountered where a semicolon was expected
+ void push_translation_error_located(const char *fmt,struct Translation_Data *translation_data,struct Source_Location *loc,...)
+ {
+ va_list args;
+ va_start(args,loc);
+ push_translation_error_located_vargs(fmt,translation_data,loc,args);
+ va_end(args);
+ }
+ void push_translation_error_located_vargs(const char *fmt,struct Translation_Data *translation_data,struct Source_Location *loc,va_list args)
+ {
+ struct wonky_stream s;
+ struct Wonky_Message *hold;
+
+ hold=get_wonky_message_vargs(WONKY_MESSAGE_TYPE_ERROR,
+ WONKY_MESSAGE_SOURCE_TRANSLATION,
+ loc->line,loc->column,loc->src->src_name->name,loc->src->src_name->name_size,fmt,args);
+ s=wonky_string_stream(hold->message);
+ wonky_fseek(&s,0,SEEK_END);
wonky_fprintf(&s,"%WPl%WIC",translation_data->token_pointer,translation_data->token_pointer);
Queue_Push(translation_data->program->errors,hold);
wonky_string_stream_delete(&s);
}
+
void push_translation_note(const char *fmt,struct Translation_Data *translation_data,...)
{
va_list args;
F diff --git a/src/environment/error/gcc_error.h b/src/environment/error/gcc_error.h --- a/src/environment/error/gcc_error.h +++ b/src/environment/error/gcc_error.h
void push_translation_error(const char *fmt,struct Translation_Data *translation_data,...);
void push_translation_error_vargs(const char *fmt,struct Translation_Data *translation_data,va_list args);
+
+ void push_translation_error_located(const char *fmt,struct Translation_Data *translation_data,struct Source_Location *loc,...);
+ void push_translation_error_located_vargs(const char *fmt,struct Translation_Data *translation_data,struct Source_Location *loc,va_list args);
+
void push_translation_note(const char *fmt,struct Translation_Data *translation_data,...);
void push_translation_note_vargs(const char *fmt,struct Translation_Data *translation_data,va_list args);
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c --- a/src/frontend/lex/lex_preprocessing_directive.c +++ b/src/frontend/lex/lex_preprocessing_directive.c
hold_lexerdata_state=lexer_data->is_in_if_directive_body; /*=(*/
lexer_data->is_in_if_directive_body=1;
- /* За мен: тука може да му пуснеш типа на токена и да парсваш спрямо него.
- * Не е супер сложното парсване тъй че може да го правиш линейно. (По-оптимално
- * е спрямо стека така или иначе :Р
- * НОВО: направо не давай типа, а направо си парсвай всичко в една фунцкия!
- * */
-
while((hold_token=lexer_extract_next_token(lexer_data))!=NULL)
{
if(hold_token->type==PKW_ELSE)
}else if(token_is_keyword(hold_token))
{
hold_id=((struct token_keyword*)hold_token)->id;
+ }else
+ {
+ return get_error_token("Id expected after #define directive",where,lexer_data->program);
}
#warning make lexer_data_check_char(lexer_data,CHAR_OPEN_NORMAL) so we check for '(' directly next to id
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c --- a/src/frontend/lex/lexer.c +++ b/src/frontend/lex/lexer.c
{
token=lexer_extract_next_token(lexer_data);
if(token!=NULL)
+ {
push_token_into_preprocessing_translation_unit(unit,token);
+ }
}
return unit;
F diff --git a/src/frontend/parse/parse_declaration.c b/src/frontend/parse/parse_declaration.c --- a/src/frontend/parse/parse_declaration.c +++ b/src/frontend/parse/parse_declaration.c
hold=parse_declarator(translation_data,scope,prototype);
if(has_new_errors(translation_data))
{
- chase_next_semicolumn(translation_data);
+ chase_next_semicolon(translation_data);
}
else if(hold->denotation==DT_Function)
{
ret->type=((struct Denoted_Type*)hold)->type;
chomp(translation_data);
return ret;
+ }else
+ {
+ push_translation_error("Expected type got %WI",translation_data,id);
}
- /*falltrough - this has not been typedefed*/
+ /*This was a fallthrough before*/
}
/*falltrough (it is possible to overwrite typedef id from upper scope)*/
default:
F diff --git a/src/frontend/parse/parse_statement.c b/src/frontend/parse/parse_statement.c --- a/src/frontend/parse/parse_statement.c +++ b/src/frontend/parse/parse_statement.c
Queue_Push(hold->components,parse_statement(translation_data,hold->scope,parse_data));
if(has_new_errors(translation_data))
- chase_next_semicolumn(translation_data);
+ chase_next_semicolon(translation_data);
}
wonky_assert(is_valid_compound_statement(hold));
return (struct AST*)get_error_tree(hold);
}
}
- void chase_next_semicolumn(struct Translation_Data *translation_data)
+ void chase_next_semicolon(struct Translation_Data *translation_data)
{
/*chase ; and start parsing next declaration*/
while(!get_and_check(translation_data,KW_SEMICOLON) && !get_and_check(translation_data,KW_CLOSE_CURLY) &&
chomp(translation_data);
}
}
+ _Bool chase_next_semicolon_if_needed(struct Translation_Data *translation_data)
+ {
+ if(has_new_errors(translation_data))
+ {
+ if(translation_data->program->continue_to_parse)
+ chase_next_semicolon(translation_data);
+ else
+ return 1;
+ }
+ return 0;
+ }
#endif
F diff --git a/src/frontend/parse/parse_statement.h b/src/frontend/parse/parse_statement.h --- a/src/frontend/parse/parse_statement.h +++ b/src/frontend/parse/parse_statement.h
struct AST* parse_finish_default_statement(struct Translation_Data* translation_data,struct Scope *scope,struct Parse_Statement_Data *parse_data);
- void chase_next_semicolumn(struct Translation_Data *translation_data);
+ void chase_next_semicolon(struct Translation_Data *translation_data);
+ _Bool chase_next_semicolon_if_needed(struct Translation_Data *translation_data);
#endif
F diff --git a/src/frontend/parse/parse_translation_unit.c b/src/frontend/parse/parse_translation_unit.c --- a/src/frontend/parse/parse_translation_unit.c +++ b/src/frontend/parse/parse_translation_unit.c
{
if(is_type_name(translation_data,hold->file_scope) || kw_get(translation_data)==KW_ID)
{
+ if(chase_next_semicolon_if_needed(translation_data))
+ break;
parse_external_definition(translation_data,hold);
- if(has_new_errors(translation_data))
- {
- if(translation_data->program->continue_to_parse)
- chase_next_semicolumn(translation_data);
- else
- break;
- }
+ if(chase_next_semicolon_if_needed(translation_data))
+ break;
}else
{
- push_translation_error("XX declaration expected [%WPl]",translation_data,translation_data->token_pointer);
- chase_next_semicolumn(translation_data);
+ push_translation_error("Declaration expected %WPl",translation_data,translation_data->token_pointer);
+ if(chase_next_semicolon_if_needed(translation_data))
+ break;
}
}
F diff --git a/src/misc/map.c b/src/misc/map.c --- a/src/misc/map.c +++ b/src/misc/map.c
if(temp == size)
{
+ void *ret=tree->ID;
if(!tree->is_final)
{
tree->ID=id;
tree->is_final=1;
}
- return tree->ID;
+ return ret;
}
for(temp;temp<size;++temp)
{
F diff --git a/src/misc/wonky_stream.c b/src/misc/wonky_stream.c --- a/src/misc/wonky_stream.c +++ b/src/misc/wonky_stream.c
struct Token_Pointer *tp=va_arg(args,struct Token_Pointer*);
_Bool print_line=0;
wonky_assert(tp);
- wonky_fprintf(s,"%WPc\n",tp->context);
+ wonky_fprintf(s,"CURRENT %WPc\n",tp->context);
for(struct Stack_Node *it=tp->call_stack->first;it;it=it->next)
wonky_fprintf(s,"%WPc\n",it->data);
struct Token_Pointer_Context *ctx;
wonky_assert(tp && tp->context);
- if(tp->context->ungeted_tokens && tp->context->ungeted_tokens->size>0)
- {
- wonky_assert(tp->context->ungeted_tokens->first &&
- tp->context->ungeted_tokens->first->data);
- wonky_fprintf(s,"\n\t> %Wtl",tp->context->ungeted_tokens->first->data);
- }else if(!tp->context->executed_macro_id && tp->context->current_token_node)
- {
- wonky_assert(tp->context->current_token_node->data);
- wonky_fprintf(s,"\n\t> %Wtl",tp->context->current_token_node->data);
- }else
+ size_t begining_of_real_line=tp->context->byte;
+ size_t ending_of_real_line=tp->context->byte;
+
+
+ ctx=tp->context;
+
+ if(ctx->current_token_node)
{
- for(struct Stack_Node *it=tp->call_stack->first;it;it=it->next)
- {
- wonky_assert(it && it->data);
- ctx=it->data;
- if(ctx->ungeted_tokens && ctx->ungeted_tokens->size>0)
- {
- wonky_assert(ctx->ungeted_tokens->first &&
- ctx->ungeted_tokens->first->data);
- wonky_fprintf(s,"\n\t> %Wtl",ctx->ungeted_tokens->first->data);
- break;
- }else if(!ctx->executed_macro_id && ctx->current_token_node)
- {
- wonky_assert(ctx->current_token_node->data);
- wonky_fprintf(s,"\n\t> %Wtl",ctx->current_token_node->data);
- break;
- }
- }
+ struct token *token;
+
+ token=(struct token*)ctx->current_token_node->data;
+
+ for(begining_of_real_line ;begining_of_real_line && token->location->src->src[begining_of_real_line]!='\n' ;--begining_of_real_line);
+ for(ending_of_real_line ;ending_of_real_line<token->location->src->src_size && token->location->src->src[ending_of_real_line]!='\n' ;++ending_of_real_line);
+
+ begining_of_real_line+=(token->location->src->src[begining_of_real_line]=='\n');
+ wonky_assert(begining_of_real_line<ending_of_real_line);
+ wonky_write(s,token->location->src->src+begining_of_real_line,ending_of_real_line-begining_of_real_line);
}
+
+
}
break;
case WONKY__CONVERSION_WONKY_MACRO_ARGUMENT:
size_t ending_of_real_line=token->location->starting_byte_index;
for(begining_of_real_line ;begining_of_real_line && token->location->src->src[begining_of_real_line]!='\n' ;--begining_of_real_line);
for(ending_of_real_line ;ending_of_real_line<token->location->src->src_size && token->location->src->src[ending_of_real_line]!='\n' ;++ending_of_real_line);
- begining_of_real_line+=!!(begining_of_real_line);
+ begining_of_real_line+=(token->location->src->src[begining_of_real_line]=='\n');
wonky_assert(begining_of_real_line<ending_of_real_line);
wonky_write(s,token->location->src->src+begining_of_real_line,ending_of_real_line-begining_of_real_line);
}
F diff --git a/src/semantics/constraints/linkage_constraints.c b/src/semantics/constraints/linkage_constraints.c --- a/src/semantics/constraints/linkage_constraints.c +++ b/src/semantics/constraints/linkage_constraints.c
{
if(hold_object->type!=ST_OBJECT_DECLARATION)
{
- push_translation_error("linking an object to a function %D",translation_data,denoted_object);
- push_translation_note("linking against %D",translation_data,hold_object);
+ push_translation_error("linking an object to a function %Wd",translation_data,denoted_object);
+ push_translation_note("linking against %Wd",translation_data,hold_object);
return 0;
}else if(!types_are_identical(hold_object->object->object->type,denoted_object->object->type))
{
push_translation_error("linking objects with mismatching types",translation_data);
- push_translation_note("%t has type %T",translation_data,denoted_object->id,denoted_object->object->type);
+ push_translation_note("%WI has type %WT",translation_data,denoted_object->id,denoted_object->object->type);
push_translation_note("whilst %t has type %T",translation_data,hold_object->object->id,hold_object->object->object->type);
return 0;
}else
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c --- a/src/semantics/program/translation_unit.c +++ b/src/semantics/program/translation_unit.c
struct token *hold_hold_token;
struct token *hold_hold_hold_token;
+
hold_token=token_ptr_get_token_under_pointer_inner(token_pointer);
if(hold_token->type==KW_STRING && token_pointer->state!=TOKEN_POINTER_STATE_PREPROCESSING)
hold_token=get_token_from_two_adjacent_strings((struct token_string*)hold_hold_token,(struct token_string*)hold_token);
}
}
-
-
return hold_token;
}
struct token* token_ptr_get_token_under_pointer_inner(struct Token_Pointer *token_pointer)
{
struct token *hold_token;
+
if(!token_ptr_has_remaining_tokens(token_pointer))
return get_eof_token();
- if(token_ptr_has_buffered_tokens(token_pointer))
- return token_ptr_get_buffered_token(token_pointer);
-
token_ptr_goto_next_normal_token(token_pointer,1);
if(!token_ptr_has_remaining_tokens(token_pointer))
return get_eof_token();
- if(token_ptr_has_buffered_tokens(token_pointer)) /*A special macro might buffer a token*/
- return token_ptr_get_buffered_token(token_pointer);
-
hold_token=(struct token*)token_pointer->context->current_token_node->data;
-
token_ptr_goto_next_token(token_pointer);
- //token_ptr_assume_location_of_token(token_pointer,hold_token);
+ token_ptr_assume_location_of_token(token_pointer,hold_token);
return hold_token;
}
void token_ptr_goto_next_normal_token(struct Token_Pointer *token_pointer,_Bool assume_location)
{
struct token *hold_token;
- if(token_ptr_has_buffered_tokens(token_pointer))
- return;
-
while(1)
{
while(!token_ptr_has_remaining_tokens_in_current_context(token_pointer) &&
token_pointer->call_stack->size>0)
+ {
token_ptr_pop_context(token_pointer);
+ }
if(token_pointer->context==NULL || token_pointer->context->current_token_node==NULL)
return;
token_ptr_goto_next_normal_token(token_pointer,0);
- if(token_ptr_has_buffered_tokens(token_pointer))
- return token_ptr_check_buffered_token(token_pointer);
-
if(!token_ptr_has_remaining_tokens(token_pointer))
return get_eof_token();
else
}
void token_ptr_goto_next_token(struct Token_Pointer *token_pointer)
{
- if(token_ptr_has_remaining_tokens(token_pointer) && !token_ptr_has_buffered_tokens(token_pointer))
+ if(token_ptr_has_remaining_tokens(token_pointer))
{
token_pointer->context->current_token_node=token_pointer->context->current_token_node->prev;
--token_pointer->context->number_of_remaining_tokens;
struct Token_Pointer *ret;
ret=wonky_malloc(sizeof(struct Token_Pointer));
- ret->context=get_token_ptr_context(unit->tokens->first,unit->tokens->size,1,0,0);
+ ret->context=get_token_ptr_context(unit->tokens->first,unit->tokens->size,1,0,0,0);
ret->call_stack=wonky_malloc(sizeof(struct Stack));
ret->state=TOKEN_POINTER_STATE_NORMAL;
ret->is_in_conditional_directive=0;
+ ret->ungeted_tokens=wonky_malloc(sizeof(struct Queue));
+
Stack_Init(ret->call_stack);
+ Queue_Init(ret->ungeted_tokens);
ret->program=program;
struct Scope *dummy_scope;
+
token_ptr_goto_next_token(ptr);
token_ptr_jump_to_first(ptr,if_directive->controlling_expression,0);
if(evaluate_const_expression_integer(control,dummy_data)!=0)
{
+ wonky_assert(if_directive->if_true!=NULL);
if(if_directive->if_true->size!=0)
token_ptr_jump_to_first(ptr,if_directive->if_true,0);
}else
{
- if(if_directive->if_false!=NULL)
+ if(if_directive->if_false && if_directive->if_false->size)
token_ptr_jump_to_first(ptr,if_directive->if_false,0);
}
struct token *ret;
size_t left_size,right_size;
+
left_size=left->location->length;
right_size=right->location->length;
if(left_size==0)
token_ptr_set_barrier(ptr);
token_ptr_goto_next_token(ptr);
+ token_ptr_clear_barrier(ptr);
hold_left=(struct token*)op->operands->first->data;
}
}
token_ptr_unget_token(ptr,(struct token*)hold_left);
-
- token_ptr_clear_barrier(ptr);
}
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
return 1;
case KW_ID:
{
- wonky_printf("hitting id token %Wt",token);
struct token_identifier *hold_id_token;
hold_id_token=(struct token_identifier*)token;
if( id_is_a_macro(
return 1;
}else if(token_pointer->is_in_conditional_directive)
{
- wonky_printf("id token %Wt is transformed into a '0'\n!",token);
token_ptr_goto_next_token(token_pointer);
/*all undefined id tokens in control expression are replaced with 0*/
token_ptr_unget_token(token_pointer,(struct token*)get_constant_token(
}
void delete_token_ptr(struct Token_Pointer *ptr)
{
+ #warning BEWARE: a dummy Translation_Data is created for #if directive control expressions that partially inherits the real token pointer; freeing here may double-free or leak that state
wonky_free(ptr);
}
- struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion,size_t line,size_t column)
+ struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion,size_t line,size_t column,size_t byte)
{
struct Token_Pointer_Context *ret;
struct Source_Location *hold_location;
ret->current_token_node=start;
ret->number_of_remaining_tokens=number_of_remaining_tokens;
- ret->ungeted_tokens=wonky_malloc(sizeof(struct Queue));
hold_location=((struct token*)start->data)->location;
//ret->line=hold_location->line;
ret->line=line;
ret->column=column;
+ ret->byte=byte;
ret->filename=hold_location->src->src_name->name;
ret->filename_size=hold_location->src->src_name->name_size;
ret->executed_macro_id=NULL;
ret->saved_macro=NULL;
ret->is_file_inclusion=is_file_inclusion;
-
- Queue_Init(ret->ungeted_tokens);
return ret;
}
void token_ptr_pop_context(struct Token_Pointer *ptr)
{
if(ptr->state==TOKEN_POINTER_STATE_ERROR)
return 0;
- if(token_ptr_has_buffered_tokens(ptr))
- {
- return 1;
- }else
- {
- while(!token_ptr_has_remaining_tokens_in_current_context(ptr)
- && ptr->call_stack->size>0
- && !ptr->context->barrier)
- token_ptr_pop_context(ptr);
- return token_ptr_has_remaining_tokens_in_current_context(ptr);
- }
+ while(!token_ptr_has_remaining_tokens_in_current_context(ptr)
+ && ptr->call_stack->size>0
+ && !ptr->context->barrier)
+ token_ptr_pop_context(ptr);
+
+
+ return token_ptr_has_remaining_tokens_in_current_context(ptr);
}
_Bool token_ptr_has_remaining_tokens_in_current_context(struct Token_Pointer *ptr)
{
wonky_assert(ptr->context!=NULL);
- return ptr->context->number_of_remaining_tokens!=0 || ptr->context->ungeted_tokens->size!=0;
+ return ptr->context->number_of_remaining_tokens!=0;
}
void token_ptr_unget_token(struct Token_Pointer *ptr,struct token *token)
{
- wonky_printf("pushing token into ungetted tokens %Wt",token);
- Queue_Push(ptr->context->ungeted_tokens,token);
+ Queue_Push(ptr->ungeted_tokens,token);
+ token_ptr_jump_to(ptr,ptr->ungeted_tokens->last,1,0);
}
void token_ptr_assume_location_of_token(struct Token_Pointer *ptr,struct token *token)
{
wonky_assert(token!=NULL);
ptr->context->line=token->location->line;
ptr->context->column=token->location->column;
- }
- _Bool token_ptr_has_buffered_tokens(struct Token_Pointer *ptr)
- {
- return ptr->context->ungeted_tokens->size!=0;
- }
- struct token* token_ptr_get_buffered_token(struct Token_Pointer *ptr)
- {
- if(token_ptr_has_buffered_tokens(ptr))
- {
- struct token *hold_token;
- hold_token=Queue_Pop(ptr->context->ungeted_tokens);
-
- return hold_token;
- }
- else
- return NULL;
+ ptr->context->byte=token->location->starting_byte_index;
}
struct Queue_Node* token_ptr_get_current_queue_node(struct Token_Pointer *ptr)
{
return ptr->context->current_token_node;
}
- struct token* token_ptr_check_buffered_token(struct Token_Pointer *ptr)
- {
- if(token_ptr_has_buffered_tokens(ptr))
- return (struct token*)ptr->context->ungeted_tokens->first->data;
- else
- return NULL;
- }
void token_ptr_set_barrier(struct Token_Pointer *ptr)
{
wonky_assert(ptr && ptr->context && !ptr->context->barrier);
push_token_pointer_error(ptr,"Preprocessing bounds exceeded");
return;
}
- new_context=get_token_ptr_context(where_to,number_of_remaining_tokens,is_file_inclusion,ptr->context->line,ptr->context->column);
+ new_context=get_token_ptr_context(where_to,number_of_remaining_tokens,is_file_inclusion,ptr->context->line,ptr->context->column,ptr->context->byte);
Stack_Push(ptr->call_stack,ptr->context);
ptr->context=new_context;
}
push_token_pointer_error(ptr,"Preprocessing bounds exceeded");
return;
}
- new_context=get_token_ptr_context(arg->first_in_argument_substitution_tokens,arg->number_of_substitution_tokens,0,ptr->context->line,ptr->context->column);
+ new_context=get_token_ptr_context(arg->first_in_argument_substitution_tokens,arg->number_of_substitution_tokens,0,ptr->context->line,ptr->context->column,ptr->context->byte);
token_ptr_store_functionlike_macro_state_into_context(new_context,arg->belongs_to->define);
Stack_Push(ptr->call_stack,ptr->context);
ptr->context=new_context;
}
void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue,_Bool is_file_inclusion)
{
- token_ptr_jump_to(ptr,queue->first,queue->size,is_file_inclusion);
+ if(queue && queue->size)
+ {
+ token_ptr_jump_to(ptr,queue->first,queue->size,is_file_inclusion);
+ }
}
void delete_token_ptr_context(struct Token_Pointer_Context *context)
{
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h --- a/src/semantics/program/translation_unit.h +++ b/src/semantics/program/translation_unit.h
struct Queue_Node *current_token_node;
size_t number_of_remaining_tokens;
- struct Queue *ungeted_tokens;
size_t line;
size_t column;
+ size_t byte;
char *filename;
size_t filename_size;
enum Token_Pointer_State state;
_Bool is_in_conditional_directive;/*TODO move this into the state*/
+ struct Queue *ungeted_tokens;
unsigned int next_barrier_number;
};
struct Token_Pointer* get_token_ptr(struct Preprocessing_Translation_Unit *unit,struct Program *program);
void delete_token_ptr(struct Token_Pointer *ptr);
- struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion,size_t line,size_t column);
+ struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion,size_t line,size_t column,size_t byte);
void token_ptr_pop_context(struct Token_Pointer *ptr);
_Bool token_ptr_has_remaining_tokens(struct Token_Pointer *ptr);
void token_ptr_assume_location_of_token(struct Token_Pointer *ptr,struct token *token);
- _Bool token_ptr_has_buffered_tokens(struct Token_Pointer *ptr);
- struct token* token_ptr_get_buffered_token(struct Token_Pointer *ptr);
- struct token* token_ptr_check_buffered_token(struct Token_Pointer *ptr);
struct Queue_Node* token_ptr_get_current_queue_node(struct Token_Pointer *ptr);