/*
 * wonky — preprocessing translation units and the token-pointer machinery
 * that drives macro expansion and directive execution.
 * (The stray page-header text "WONKY / LOG | FILES | OVERVIEW" was
 * extraction residue and not valid C; preserved here as a comment.)
 */
#ifndef WONKY_TRANSLATION_UNIT_C
#define WONKY_TRANSLATION_UNIT_C WONKY_TRANSLATION_UNIT_C
#include <translation_unit.h>

struct Preprocessing_Translation_Unit* get_preprocessing_translation_unit(struct Source_File *source)
{
	/* Allocate a translation unit with an empty token queue.
	 * NOTE(review): 'source' is currently unused — confirm whether the
	 * caller expects it to be recorded on the unit. */
	struct Preprocessing_Translation_Unit *unit;

	unit=wonky_malloc(sizeof(struct Preprocessing_Translation_Unit));
	unit->tokens=wonky_malloc(sizeof(struct Queue));
	Queue_Init(unit->tokens);

	return unit;
}
/* Intended destructor for a preprocessing translation unit.
 * Currently a deliberate no-op: neither the token queue nor the tokens are
 * released (matching the "these don't do anything" pattern used elsewhere
 * in this file).
 * NOTE(review): token-pointer contexts keep raw Queue_Node pointers into
 * unit->tokens, so freeing here could cause use-after-free — confirm
 * ownership before implementing. */
void delete_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit)
{
	return;
}
/* Intended to free the unit and its queue while leaving the token objects
 * alive (used after re-lexing in token_ptr_execute_concat_two_tokens, where
 * the caller keeps a pointer to a token but not to the unit).
 * Currently a no-op, so the unit and its queue leak.
 * NOTE(review): confirm no live context points into unit->tokens before
 * implementing the free. */
void delete_preprocessing_translation_unit_but_not_the_tokens(struct Preprocessing_Translation_Unit *unit)
{
	return;
}
/* Append one token to the unit's token queue. */
void push_token_into_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit,struct token *token)
{
	struct Queue *tokens;

	tokens=unit->tokens;
	Queue_Push(tokens,token);
}
/* Fetch the next token while inside a preprocessing directive.
 * TODO: unimplemented.  The original body was empty, which is undefined
 * behavior when the (non-void) return value is used; return the shared
 * EOF sentinel so callers that inspect ->type see a well-defined token
 * instead of garbage, matching how exhaustion is reported elsewhere. */
struct token* token_ptr_get_token_under_pointer_in_preprocessing_directive(struct Token_Pointer *token_pointer)
{
	(void)token_pointer;
	return get_eof_token();
}
/* Fetch the next fully-expanded token.
 * On top of the inner fetch, this performs translation-phase-6 style
 * concatenation: outside of preprocessing state, consecutive string
 * literals (including strings produced by the __FILE__ macro) are merged
 * into a single string token. */
struct token* token_ptr_get_token_under_pointer(struct Token_Pointer *token_pointer)
{
	struct token *hold_token;
	struct token *hold_hold_token;
	struct token *hold_hold_hold_token;

	hold_token=token_ptr_get_token_under_pointer_inner(token_pointer);

	if(hold_token->type==KW_STRING && token_pointer->state!=TOKEN_POINTER_STATE_PREPROCESSING)
	{
		/* Peek ahead; as long as another string (or __FILE__) follows,
		 * consume it and fold it into the accumulated string token. */
		while( (hold_hold_hold_token=token_ptr_check_next_normal_token(token_pointer))->type==KW_STRING 
				|| hold_hold_hold_token->type==PKW_FILE_MACRO
		) 
		{
			hold_hold_token=hold_token;
			hold_token=token_ptr_get_token_under_pointer_inner(token_pointer);	
			hold_token=get_token_from_two_adjacent_strings((struct token_string*)hold_hold_token,(struct token_string*)hold_token);
		}
	}

	
	return hold_token;
}
/* Core token fetch (no string concatenation).
 * Order of precedence:
 *   1. EOF sentinel when nothing remains;
 *   2. buffered ("ungeted") tokens;
 *   3. otherwise skip over preprocessing constructs, take the token under
 *      the pointer, advance, and update line/column bookkeeping. */
struct token* token_ptr_get_token_under_pointer_inner(struct Token_Pointer *token_pointer)
{
	struct token *hold_token;
	if(!token_ptr_has_remaining_tokens(token_pointer))
		return get_eof_token();

	if(token_ptr_has_buffered_tokens(token_pointer))
		return token_ptr_get_buffered_token(token_pointer);

	token_ptr_goto_next_normal_token(token_pointer);

	/* Directive execution above may have exhausted the input ... */
	if(!token_ptr_has_remaining_tokens(token_pointer))
		return get_eof_token();

	if(token_ptr_has_buffered_tokens(token_pointer)) /*A special macro might buffer a token*/
		return token_ptr_get_buffered_token(token_pointer);

	hold_token=(struct token*)token_pointer->context->current_token_node->data;


	token_ptr_goto_next_token(token_pointer);

	token_ptr_assume_location_of_token(token_pointer,hold_token);

	return hold_token;
}
/* Advance until the token under the pointer is a "normal" token, i.e. one
 * that token_ptr_do_preprocessing_stuff() does not consume (directives,
 * macro expansions, special macros are executed along the way).
 * No-op when buffered tokens are pending.  Exhausted non-barrier contexts
 * are popped; returns with context==NULL or current_token_node==NULL when
 * input is fully exhausted. */
void token_ptr_goto_next_normal_token(struct Token_Pointer *token_pointer)
{
	struct token *hold_token;
	if(token_ptr_has_buffered_tokens(token_pointer))
		return;


	while(1)
	{
		/* Unwind contexts (macro expansions, includes) that ran dry. */
		while(!token_ptr_has_remaining_tokens_in_current_context(token_pointer) &&
			       	token_pointer->call_stack->size>0)
			token_ptr_pop_context(token_pointer);

		if(token_pointer->context==NULL || token_pointer->context->current_token_node==NULL)
			return;

		hold_token=(struct token*)token_pointer->context->current_token_node->data;
		wonky_assert(hold_token!=NULL);
		token_ptr_assume_location_of_token(token_pointer,hold_token);

		/* Returns 0 when the token is "normal" — stop there. */
		if(!token_ptr_do_preprocessing_stuff(token_pointer,hold_token))
			return;
	}
	wonky_assert(SHOULD_NOT_REACH_HERE);
}
/* Peek at the next "normal" token without consuming it.
 * Preprocessing constructs are executed as a side effect of the advance;
 * buffered (ungeted) tokens take priority, and the shared EOF sentinel is
 * returned when nothing remains.
 * (Fixed: removed an unused local variable.) */
struct token* token_ptr_check_next_normal_token(struct Token_Pointer *token_pointer)
{
	token_ptr_goto_next_normal_token(token_pointer);

	if(token_ptr_has_buffered_tokens(token_pointer))
		return token_ptr_check_buffered_token(token_pointer);

	if(!token_ptr_has_remaining_tokens(token_pointer))
		return get_eof_token();
	else
		return (struct token*)token_pointer->context->current_token_node->data;
}
/* Unconditionally step to the next queued token.
 * Queue nodes are linked via ->prev (the queue grows at 'first' and is
 * walked backwards).  No-op when buffered tokens are pending — those must
 * be drained first — or when nothing remains. */
void token_ptr_goto_next_token(struct Token_Pointer *token_pointer)
{
	if(token_ptr_has_remaining_tokens(token_pointer) && !token_ptr_has_buffered_tokens(token_pointer))
	{
		token_pointer->context->current_token_node=token_pointer->context->current_token_node->prev;	
		--token_pointer->context->number_of_remaining_tokens;
	}
}

/* Create a token pointer positioned at the first token of 'unit'. */
struct Token_Pointer* get_token_ptr(struct Preprocessing_Translation_Unit *unit,struct Program *program)
{
	struct Token_Pointer *ptr;

	ptr=wonky_malloc(sizeof(struct Token_Pointer));

	/* The root context spans the whole unit and counts as a file inclusion. */
	ptr->context=get_token_ptr_context(unit->tokens->first,unit->tokens->size,1);

	ptr->call_stack=wonky_malloc(sizeof(struct Stack));
	Stack_Init(ptr->call_stack);

	ptr->state=TOKEN_POINTER_STATE_NORMAL;
	ptr->is_in_conditional_directive=0;
	ptr->program=program;

	return ptr;
}

/* Execute an #include directive.
 * The filename can come in two forms:
 *   - a single string token:            #include "file"
 *   - a '<' ... '>' run of tokens:      #include <file>
 *     (the raw text of the tokens between '<' and '>' is printed into a
 *      string to rebuild the filename)
 * In both cases the directive's own token list is macro-expanded first by
 * jumping into it under a barrier in preprocessing state.  The named
 * translation unit is then looked up through the program and, when found
 * and non-empty, the pointer jumps to its first token as a file inclusion. */
void token_ptr_execute_include_directive(struct Token_Pointer *ptr,struct token_include_directive *include_directive)
{
	struct token *hold_token;
	struct Preprocessing_Translation_Unit *hold_unit;

	wonky_assert(include_directive->tokens->first);

	token_ptr_goto_next_token(ptr);

	/* Expand the directive's tokens to obtain the filename. */
	token_ptr_jump_to_first(ptr,include_directive->tokens,0);

	token_ptr_set_barrier(ptr);
	ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
	hold_token=token_ptr_get_token_under_pointer(ptr);
	ptr->state=TOKEN_POINTER_STATE_NORMAL;
	token_ptr_clear_barrier(ptr);

	if(hold_token->type==KW_STRING) /* #include "file" */
	{
		char *include_name;
		if(ptr->context->current_token_node!=NULL)
		{
			push_token_pointer_error(ptr,"Unexpected token at end of include directive");
			return;
		}
		include_name=((struct token_string*)hold_token)->constant->value;
		/* NOTE(review): the 100-byte cap silently truncates longer
		 * filenames — confirm the intended limit. */
		hold_unit=program_get_translation_unit(ptr->program,include_name,gstrnlen(include_name,100));

		if(hold_unit==NULL)
		{

			token_ptr_pop_context(ptr);
			push_token_pointer_error(ptr,"Failed to execute include directive %Wtl",include_directive);
			if(ptr->call_stack->size)
				push_generic_note(ptr->program,"%WIC",ptr);
			return;
		}

		if(hold_unit->tokens->first)
			token_ptr_jump_to_first(ptr,hold_unit->tokens,1);
		else
			token_ptr_pop_context(ptr);
	}else if(hold_token->type==KW_LESS) /* #include <file> */
	{
		struct wonky_str include_name;
		struct wonky_stream strs;

		include_name=wonky_string_make();

		strs=wonky_string_stream(&include_name);
		token_ptr_set_barrier(ptr);
		ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
		hold_token=token_ptr_get_token_under_pointer(ptr);
		/* Accumulate raw token text until the closing '>' (or '>='). */
		while(hold_token->type!=KW_MORE && hold_token->type!=KW_MORE_EQ)
		{

			print_raw_token_text(&strs,hold_token);
			hold_token=token_ptr_get_token_under_pointer(ptr);
		}
		ptr->state=TOKEN_POINTER_STATE_NORMAL;
		token_ptr_clear_barrier(ptr);
		wonky_stream_delete(&strs);

		if(ptr->context->current_token_node!=NULL)
		{
			push_token_pointer_error(ptr,"Unexpected token at end of include directive");
			return;
		}

		/* NOTE(review): include_name's buffer does not appear to be
		 * released on any path here — confirm wonky_str ownership. */
		hold_unit=program_get_translation_unit(ptr->program,include_name.cs,wonky_string_length(include_name));

		if(hold_unit==NULL)
		{
			token_ptr_pop_context(ptr);
			push_token_pointer_error(ptr,"Failed to execute include directive %Wtl",include_directive);
			if(ptr->call_stack->size)
				push_generic_note(ptr->program,"%WIC",ptr);
			return;
		}

		if(hold_unit->tokens->first)
			token_ptr_jump_to_first(ptr,hold_unit->tokens,1);
		else
			token_ptr_pop_context(ptr);
	}else if(hold_token->type==KW_LESS_EQ) /*implementation defined*/
	{
		push_token_pointer_error(ptr,"'=' is not supported inside filename in include directive with angular brackets and macro expansion");
		return;
	}



}
/* Execute #if / #elif.
 * The controlling expression's tokens are parsed with a throwaway
 * translation-data/scope pair (under a barrier, in preprocessing state
 * with is_in_conditional_directive set so 'defined' and undefined ids are
 * handled specially), evaluated as an integer constant expression, and the
 * pointer jumps into the matching token group. */
void token_ptr_execute_if_directive(struct Token_Pointer *ptr,struct token_if_directive *if_directive)
{
	struct AST *control;
	struct Translation_Data *dummy_data;
	struct Scope *dummy_scope;


	token_ptr_goto_next_token(ptr);
	token_ptr_jump_to_first(ptr,if_directive->controlling_expression,0);


	dummy_data=get_dummy_translation_data_for_parsing_const_expressions(ptr);
	dummy_scope=get_normal_scope(NULL,BLOCK_SCOPE);

	ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
	ptr->is_in_conditional_directive=1;
	token_ptr_set_barrier(ptr);

	control=parse_expression(dummy_data,dummy_scope);

	token_ptr_clear_barrier(ptr);
	ptr->is_in_conditional_directive=0;
	ptr->state=TOKEN_POINTER_STATE_NORMAL;

	/* The parse must have consumed the whole controlling expression. */
	if(token_ptr_has_remaining_tokens_in_current_context(ptr))
	{
		push_token_pointer_error(ptr,"Remaining tokens in #if directive after control expression\n");
		token_ptr_pop_context(ptr);
	}

	if(evaluate_const_expression_integer(control,dummy_data)!=0)
	{
		if(if_directive->if_true->size!=0)
			token_ptr_jump_to_first(ptr,if_directive->if_true,0);
	}else
	{
		/* if_false may be NULL when there is no #else/#elif group. */
		if(if_directive->if_false!=NULL)
			token_ptr_jump_to_first(ptr,if_directive->if_false,0);
	}


	/*As of writing these don't do anything*/
	delete_ast(control);
	delete_translation_data(dummy_data);
	delete_scope(dummy_scope);
}
/* Execute #ifdef: jump into the 'if_defined' group when the id is
 * currently a macro in this translation unit, else the 'if_undefined'
 * group.  The group fields are assumed to be filled semantically by the
 * lexer (i.e. 'if_defined' always holds the tokens to use when the macro
 * is defined). */
void token_ptr_execute_ifdef_directive(struct Token_Pointer *ptr,struct token_ifdefndef_directive *ifdef_directive)
{
	token_ptr_goto_next_token(ptr);
	if(token_is_a_macro(ifdef_directive->id,ptr->program->current_translation_unit_number))
		token_ptr_jump_to_first(ptr,ifdef_directive->if_defined,0);
	else
		token_ptr_jump_to_first(ptr,ifdef_directive->if_undefined,0);
}
/* Execute #ifndef.  Intentionally identical to the #ifdef handler:
 * the shared token_ifdefndef_directive's 'if_defined'/'if_undefined'
 * groups are presumed to be filled semantically by the lexer, so the same
 * dispatch works for both directives.
 * NOTE(review): confirm the lexer swaps the groups for #ifndef — if it
 * stores them positionally instead, this handler is inverted. */
void token_ptr_execute_ifndef_directive(struct Token_Pointer *ptr,struct token_ifdefndef_directive *ifndef_directive)
{
	token_ptr_goto_next_token(ptr);
	if(token_is_a_macro(ifndef_directive->id,ptr->program->current_translation_unit_number))
		token_ptr_jump_to_first(ptr,ifndef_directive->if_defined,0);
	else
		token_ptr_jump_to_first(ptr,ifndef_directive->if_undefined,0);
}
void token_ptr_execute_normal_define_directive(struct Token_Pointer *ptr,struct token_normal_define_directive *define_directive)
{
	define_directive->define->id->number_of_translation_unit_where_id_was_last_defined_as_a_macro
		=
	ptr->program->current_translation_unit_number;

	define_directive->define->id->last_defined_macro_with_this_id=(struct token*)define_directive;

	token_ptr_goto_next_token(ptr);
}
void token_ptr_execute_functionlike_define_directive(struct Token_Pointer *ptr,struct token_functionlike_define_directive *define_directive)
{
	define_directive->define->id->number_of_translation_unit_where_id_was_last_defined_as_a_macro
		=
	ptr->program->current_translation_unit_number;

	define_directive->define->id->last_defined_macro_with_this_id=(struct token*)define_directive;

	token_ptr_goto_next_token(ptr);
}
/* Execute #undef: forget that the named identifier is a macro.
 * The directive's operand may be an identifier token or a keyword token
 * (keywords can be #defined too).
 * Fixed: 'id' was read uninitialized when the operand was neither kind
 * and wonky_assert compiled to a no-op; initialize it to NULL so the
 * guarded write below stays well-defined. */
void token_ptr_execute_undef_directive(struct Token_Pointer *ptr,struct token_undef_directive *undef_directive)
{
	struct identifier *id=NULL;
	token_ptr_goto_next_token(ptr);
	if(undef_directive->id->type==KW_ID)
	{
		id=((struct token_identifier*)undef_directive->id)->id;	
	}else if(token_is_keyword(undef_directive->id))
	{
		id=((struct token_keyword*)undef_directive->id)->id;	
	}else
	{
		wonky_assert(SHOULD_NOT_REACH_HERE);
	}
	if(id!=NULL)
	{
		/* Translation unit number 0 means "not currently a macro". */
		id->number_of_translation_unit_where_id_was_last_defined_as_a_macro=0;
	}
}
/* Execute #line: optionally override the reported filename, then reset
 * the reported line/column of the current context.
 * NOTE(review): the '-2' offset presumably compensates for the line
 * deltas applied when subsequent token locations are assumed — confirm
 * against token_ptr_assume_location_of_token(). */
void token_ptr_execute_line_directive(struct Token_Pointer *ptr,struct token_line_directive *line_directive)
{
	if(line_directive->filename!=NULL)
	{
		ptr->context->filename=line_directive->filename;
		ptr->context->filename_size=line_directive->filename_size;
	}

	ptr->context->line=line_directive->line-2;
	ptr->context->column=0;

	token_ptr_goto_next_token(ptr);
}
/* Execute #error: emit the pre-built error message and halt parsing. */
void token_ptr_execute_error_directive(struct Token_Pointer *ptr,struct token_error_directive *error_directive)
{
	push_message_struct(ptr->program,error_directive->error_message);
	program_stop_parsing(ptr->program);
	token_ptr_goto_next_token(ptr);
}
/* Execute #pragma: pragmas are currently ignored — just skip past. */
void token_ptr_execute_pragma_directive(struct Token_Pointer *ptr,struct token_pragma_directive *pragma_directive)
{
	token_ptr_goto_next_token(ptr);
}
/* Evaluate the 'defined' operator inside an #if/#elif controlling
 * expression: buffer a decimal constant token — "1" when the operand id
 * is currently a macro in this translation unit, "0" otherwise. */
void token_ptr_execute_defined_unary_operator(struct Token_Pointer *ptr,struct token_defined_unary_operator *operator)
{
	struct token_constant *result;
	char *digit;

	token_ptr_goto_next_token(ptr);

	digit=token_is_a_macro(operator->id,ptr->program->current_translation_unit_number)?"1":"0";

	result=(struct token_constant*)get_constant_token(
			KW_DECIMAL_CONSTANT,
			operator->delta->location,
			operator->delta->location,
			digit,
			1);

	Queue_Push(ptr->context->ungeted_tokens,result);
}
/* Expand __FILE__: buffer a string token holding the context's current
 * filename (which #line directives may have overridden). */
void token_ptr_execute_file_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
	struct token_string *filename_token;

	token_ptr_goto_next_token(ptr);

	filename_token=(struct token_string*)get_string_token(
			KW_STRING,
			directive->delta->location,
			directive->delta->location,
			ptr->context->filename,
			ptr->context->filename_size);

	Queue_Push(ptr->context->ungeted_tokens,filename_token);
}
/* Expand __LINE__: buffer an integer constant token with the current
 * line; +1 because the context's line counter appears to be 0-based
 * (see get_token_ptr_context). */
void token_ptr_execute_line_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
	struct token_constant *line_token;

	token_ptr_goto_next_token(ptr);

	line_token=(struct token_constant*)get_constant_long_long_int_token(
			directive->delta->location,
			directive->delta->location,
			ptr->context->line+1);

	Queue_Push(ptr->context->ungeted_tokens,line_token);
}
/* Expand __STDC__: always the decimal constant 1. */
void token_ptr_execute_stdc_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
	struct token_constant *one;

	token_ptr_goto_next_token(ptr);

	one=(struct token_constant*)get_constant_token(
			KW_DECIMAL_CONSTANT,
			directive->delta->location,
			directive->delta->location,
			"1",
			1);

	Queue_Push(ptr->context->ungeted_tokens,one);
}
void token_ptr_execute_macro(struct Token_Pointer *ptr,struct identifier *id)
{
	id->was_already_expanded_as_a_macro=1;

	token_ptr_goto_next_token(ptr);

	if(id->last_defined_macro_with_this_id->type==PKW_DEFINE)
		token_ptr_execute_normal_macro(ptr,((struct token_normal_define_directive*)id->last_defined_macro_with_this_id)->define);
	else if(id->last_defined_macro_with_this_id->type==PKW_FUNCTIONLIKE_DEFINE)
		token_ptr_execute_functionlike_macro(ptr,((struct token_functionlike_define_directive*)id->last_defined_macro_with_this_id)->define);
	else
		wonky_assert(SHOULD_NOT_REACH_HERE);

}
/* Jump into an object-like macro's replacement list.  The id is recorded
 * on the NEW context (jump_to_first switches contexts first) so that
 * popping it can re-enable expansion of this macro. */
void token_ptr_execute_normal_macro(struct Token_Pointer *ptr,struct normal_define_directive *macro)
{
	token_ptr_jump_to_first(ptr,macro->replacement_tokens,0);
	ptr->context->executed_macro_id=macro->id;
}
/* Expand a function-like macro: collect the invocation's arguments into
 * the macro's argument descriptors, then jump into the replacement list.
 * As with object-like macros, the id is recorded on the new context. */
void token_ptr_execute_functionlike_macro(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
	token_ptr_load_functionlike_macro_arguments_with_tokens(ptr,macro);
	token_ptr_jump_to_first(ptr,macro->replacement_tokens,0);
	ptr->context->executed_macro_id=macro->id;
}
/* Expand an argument occurrence inside a function-like macro's
 * replacement list: jump into the argument's collected substitution
 * tokens, or skip when the argument is empty.
 * NOTE(review): the empty-argument path advances the pointer a second
 * time — presumably to step over a placeholder token; confirm this is
 * not an accidental double advance. */
void token_ptr_execute_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *argument)
{
	token_ptr_goto_next_token(ptr);
	if(argument->number_of_substitution_tokens==0)
		token_ptr_goto_next_token(ptr);
	else
		token_ptr_jump_to_functionlike_macro_argument(ptr,argument);
}

/* Snapshot a function-like macro's per-argument substitution state so a
 * nested invocation of the same macro can temporarily overwrite it.
 * The snapshot is a queue of heap-allocated copies of each argument
 * descriptor, stored on the context for later restoration. */
void token_ptr_store_functionlike_macro_state_into_context(struct Token_Pointer_Context *ctx,struct functionlike_define_directive *macro)
{
	struct Queue *backup;
	struct functionlike_define_directive_argument *clone;
	struct Queue_Node *node;

	/* A context may hold at most one snapshot. */
	wonky_assert(ctx->has_saved_functionlike_macro_state==0);

	backup=wonky_malloc(sizeof(struct Queue));
	Queue_Init(backup);

	/* Deep-copy every argument descriptor (queue nodes link via ->prev). */
	for(node=macro->arguments->first;node;node=node->prev)
	{
		clone=wonky_malloc(sizeof(struct functionlike_define_directive_argument));
		*clone=*(struct functionlike_define_directive_argument*)node->data;
		Queue_Push(backup,clone);
	}

	ctx->has_saved_functionlike_macro_state=1;
	ctx->functionlike_macro_arguments_save_state=backup;
	ctx->saved_macro=macro;
	/* Permit the macro to expand again inside the nested invocation. */
	macro->id->was_already_expanded_as_a_macro=0;
}
/* Restore the argument state snapshotted by
 * token_ptr_store_functionlike_macro_state_into_context().
 * Fixed: the loop used to free it2->data — the macro's *live* argument
 * descriptor, still owned by macro->arguments — rather than it1->data,
 * the temporary clone allocated by the store function.  That left the
 * macro pointing at freed memory and leaked every clone; free the clone
 * after copying its contents back. */
void token_ptr_load_functionlike_macro_state_from_context(struct Token_Pointer_Context *ctx)
{
	struct Queue_Node *it1,*it2;
	struct functionlike_define_directive *macro=ctx->saved_macro;

	for(it1=ctx->functionlike_macro_arguments_save_state->first,it2=macro->arguments->first;it1&&it2;it1=it1->prev,it2=it2->prev)
	{
		*(struct functionlike_define_directive_argument*)it2->data=*(struct functionlike_define_directive_argument*)it1->data;
		wonky_free(it1->data);
	}
	Queue_Destroy(ctx->functionlike_macro_arguments_save_state);
	wonky_free(ctx->functionlike_macro_arguments_save_state);
	
	ctx->has_saved_functionlike_macro_state=0;
	ctx->functionlike_macro_arguments_save_state=NULL;
	/* The outer invocation is active again — re-block recursive expansion. */
	macro->id->was_already_expanded_as_a_macro=1;
	ctx->saved_macro=NULL;
}

void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op)
{
	struct token_string *hold_string_token;
	struct token *hold_token;
	size_t i;
	Queue_Node *it;

	
	char *hold_str;
	size_t hold_str_size;

	token_ptr_goto_next_token(ptr);

	
	/*This wastes a LOT OF MEMORY. TODO make a temp token allocation scheme*/

	hold_string_token=(struct token_string*)get_string_token(KW_STRING,op->delta->location,op->delta->location,"",0);
	for(it=op->operand->argument->first_in_argument_substitution_tokens,i=0;i<op->operand->argument->number_of_substitution_tokens && it!=NULL;++i,it=it->prev)
	{
		hold_token=(struct token*)it->data;
		wonky_assert(is_valid_token(hold_token));
		hold_str=get_string_from_token(hold_token,&hold_str_size);
		hold_string_token=(struct token_string*)
			get_token_from_two_strings_with_a_space_between(
					hold_string_token,
					(struct token_string*)get_string_token(KW_STRING,hold_token->delta->location,hold_token->delta->location,hold_str,hold_str_size)
				);
	}
	Queue_Push(ptr->context->ungeted_tokens,hold_string_token);
		
}
/* Implement '##': concatenate the source text of two tokens and re-lex the
 * result into a single token.
 * Placemarker handling: an empty-text operand yields the other operand
 * unchanged.  Otherwise the raw bytes of both tokens are copied into a
 * temporary source file and run through the lexer; the single resulting
 * token is returned.
 * NOTE(review): the lexer is assumed to produce exactly one token — if
 * the paste is not a valid preprocessing token the assert below fires
 * (the standard calls this undefined behavior). */
struct token* token_ptr_execute_concat_two_tokens(struct token *left,struct token *right,struct Program *program)
{
#warning this is a bit slow on consecutive concatenations x##y##z##p 
	struct Source_File *temp_source_file; /*does not get freed*/
	struct Lexer_Data *temp_lexer_data;
	struct Preprocessing_Translation_Unit *hold_token;
	struct token *ret;
	size_t left_size,right_size;

	left_size=left->delta->location->length;
	right_size=right->delta->location->length;
	/* Placemarker cases: one side has no text. */
	if(left_size==0)
		return right;
	if(right_size==0)
		return left;


	temp_source_file=get_temp_source_file();
	temp_source_file->src=wonky_malloc(left_size+right_size);
	temp_source_file->src_size=left_size+right_size;

	/* Splice the two tokens' raw source bytes back to back. */
	gmemmove(temp_source_file->src,
		 left->delta->location->src->src+left->delta->location->starting_byte_index,
		 left_size);
	gmemmove(temp_source_file->src+left_size,
		 right->delta->location->src->src+right->delta->location->starting_byte_index,
		 right_size);

	temp_lexer_data=get_lexer_data(temp_source_file,program);

	/*
	  hack to make the lexer not 'lex' a preprocessing directive
	  while after concatenating '#' and 'include' for example
	*/
	temp_lexer_data->is_in_the_begining_of_line=0; 

	hold_token=lex_inner(temp_lexer_data);


	wonky_assert(	hold_token && 
			hold_token->tokens->size==1 &&
		       	hold_token->tokens->first   &&
			hold_token->tokens->first->data);

	ret=hold_token->tokens->first->data;



	delete_lexer_data(temp_lexer_data);
	delete_preprocessing_translation_unit_but_not_the_tokens(hold_token);

	return ret;

}
/*
 * For both object-like and function-like macro invocations, before the replacement list is
 * reexamined for more macro names to replace, each instance of a ## preprocessing token
 * in the replacement list (not from an argument) is deleted and the preceding preprocessing
 * token is concatenated with the following preprocessing token. Placemarker
 * preprocessing tokens are handled specially: concatenation of two placemarkers results in
 * a single placemarker preprocessing token, and concatenation of a placemarker with a
 * non-placemarker preprocessing token results in the non-placemarker preprocessing token.
 * If the result is not a valid preprocessing token, the behavior is undefined. The resulting
 * token is available for further macro replacement. The order of evaluation of ## operators
 * is unspecified.
 */
void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_operator *op)
{
	struct Preprocessing_Translation_Unit *hold_unit;
	struct token *hold_left,*hold_right;
	struct Queue_Node *it;

	size_t result_size=0;

	token_ptr_set_barrier(ptr);
	token_ptr_goto_next_token(ptr);

	hold_left=(struct token*)op->operands->first->data;

	if(hold_left->type==PKW_MACRO_ARGUMENT)
	{
		/*
		 * if the first token in the concatination is a macro argument chase the last token
		 * and use it for concatination
		 */
		struct token_functionlike_define_argument *arg=(struct token_functionlike_define_argument*)hold_left;
		if(arg->argument->number_of_substitution_tokens)
		{
			size_t i;
			for(i=1,it=arg->argument->first_in_argument_substitution_tokens;i<arg->argument->number_of_substitution_tokens;
					++i,it=it->prev)
			{
				Queue_Push(ptr->context->ungeted_tokens,it->data);
			}
			hold_left=(struct token*)it->data;
		}
	}
	
	for(it=op->operands->first->prev;it;it=it->prev) /*start from the second argument, the first is in hold_left*/
	{
		hold_right=(struct token*)it->data;
		wonky_assert(hold_left!=NULL);
		if(hold_right->type==PKW_MACRO_ARGUMENT)
		{
			struct token_functionlike_define_argument *arg=(struct token_functionlike_define_argument*)hold_right;
			if(arg->argument->number_of_substitution_tokens>1)
			{
				size_t i;

				hold_right=(struct token*)arg->argument->first_in_argument_substitution_tokens->data;
				hold_left=token_ptr_execute_concat_two_tokens(hold_left,hold_right,ptr->program);
				Queue_Push(ptr->context->ungeted_tokens,hold_left);

				for(i=1,it=arg->argument->first_in_argument_substitution_tokens->prev;
						i<arg->argument->number_of_substitution_tokens;
						++i,it=it->prev)
				{
					Queue_Push(ptr->context->ungeted_tokens,it->data);
				}
					hold_left=(struct token*)it->data;
			}else if(arg->argument->number_of_substitution_tokens==1)
			{

				hold_left=token_ptr_execute_concat_two_tokens(hold_left,arg->argument->first_in_argument_substitution_tokens->data,ptr->program);
			}
		}else 
		{
			hold_left=token_ptr_execute_concat_two_tokens(hold_left,hold_right,ptr->program);
		}
	}
	Queue_Push(ptr->context->ungeted_tokens,hold_left);	

	token_ptr_clear_barrier(ptr);
}
/* Collect a function-like macro invocation's arguments.
 * Expects '(' right after the macro id, then walks tokens with macro
 * expansion disabled, tracking bracket nesting; top-level commas split
 * arguments.  Each argument descriptor receives the queue node of its
 * first token and a token count.  Arity mismatches are reported as
 * errors.
 * NOTE(review): the state is blindly reset to NORMAL at the end — the
 * in-code comment below acknowledges this assumption. */
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
	int open_bracket_count=1;
	size_t number_of_tokens_in_argument;

	struct token *hold_token;
	struct Queue_Node *hold_argument_node;
	struct Queue_Node *hold_leading_token_node;
	struct functionlike_define_directive_argument *hold_arg;	



	hold_token=token_ptr_get_token_under_pointer(ptr);
	if(hold_token->type!=KW_OPEN_NORMAL)
	{
		push_token_pointer_error(ptr,"Expected '(' after functionlike macro id");
		return;
	}

	hold_argument_node=macro->arguments->first;

	hold_leading_token_node=token_ptr_get_current_queue_node(ptr);

	number_of_tokens_in_argument=0;

	/* Arguments are captured verbatim; they expand on use, not here. */
	ptr->state=TOKEN_POINTER_STATE_DONT_EXPAND_MACROS;
	
	while(open_bracket_count>=1 && token_ptr_has_remaining_tokens(ptr))
	{
		hold_token=token_ptr_get_token_under_pointer(ptr);
		if(hold_token->type==KW_OPEN_NORMAL)
		{
			++open_bracket_count;
			if(open_bracket_count)
				++number_of_tokens_in_argument;
		}
		else if(hold_token->type==KW_CLOSE_NORMAL)
		{
			/* The final ')' (count reaching 0) is not part of any argument. */
			--open_bracket_count;
			if(open_bracket_count)
				++number_of_tokens_in_argument;
		}
		else if(hold_token->type==KW_COMMA && open_bracket_count==1) /*if we are at the top level ()*/
		{
			if(hold_argument_node==NULL)
			{
				push_token_pointer_error(ptr,"Too many arguments given to functionlike macro");
				return;
			}
			if(number_of_tokens_in_argument==0)
			{
				push_token_pointer_error(ptr,"No tokens in functionlike macro");
				return;
			}
			/* Finish this argument and start collecting the next one. */
			hold_arg=(struct functionlike_define_directive_argument*)hold_argument_node->data;
			hold_arg->first_in_argument_substitution_tokens=hold_leading_token_node;
			hold_arg->number_of_substitution_tokens=number_of_tokens_in_argument;

			number_of_tokens_in_argument=0;
			hold_argument_node=hold_argument_node->prev;
			hold_leading_token_node=token_ptr_get_current_queue_node(ptr);
		}else
		{
			++number_of_tokens_in_argument;
		}
	}

	/* Exactly one argument descriptor must remain for the final argument. */
	if(hold_argument_node==NULL || hold_argument_node->prev!=NULL)
	{
		push_token_pointer_error(ptr,"Too few arguments given to functionlike macro");
		return;
	}
	hold_arg=(struct functionlike_define_directive_argument*)hold_argument_node->data;
	hold_arg->first_in_argument_substitution_tokens=hold_leading_token_node;
	hold_arg->number_of_substitution_tokens=number_of_tokens_in_argument;

	
	/*
	 * we assume that this was the state of the pointer before setting it
	 * to TOKEN_POINTER_STATE_DONT_EXPAND_MACROS
	 */
	ptr->state=TOKEN_POINTER_STATE_NORMAL; 

}
/* Central preprocessing dispatch.
 * Examines the token under the pointer and, when it is a preprocessing
 * construct (directive, macro id, special macro, '#'/'##' operator, EOF,
 * lexing error), executes it.
 * Returns 1 when the token was consumed and scanning should continue,
 * 0 when the token is a "normal" token to be handed to the parser. */
_Bool token_ptr_do_preprocessing_stuff(struct Token_Pointer *token_pointer,struct token *token)
{
		switch(token->type)
		{
			case PKW_IF:
			case PKW_ELIF:
				token_ptr_execute_if_directive(token_pointer,(struct token_if_directive*)token);
				return 1;
			case PKW_IFDEF:
				token_ptr_execute_ifdef_directive(token_pointer,(struct token_ifdefndef_directive*)token);
				return 1;
			case PKW_IFNDEF:
				token_ptr_execute_ifndef_directive(token_pointer,(struct token_ifdefndef_directive*)token);
				return 1;
			case PKW_ELSE:
				/* #else/#endif tokens are absorbed into the #if
				 * directive structure by the lexer and must never
				 * reach this dispatch. */
				wonky_assert(SHOULD_NOT_REACH_HERE);
				return 1;
			case PKW_ENDIF:
				wonky_assert(SHOULD_NOT_REACH_HERE);
				return 1;
			case PKW_INCLUDE:
				token_ptr_execute_include_directive(token_pointer,(struct token_include_directive*)token);
				return 1;
			case PKW_DEFINE:
				token_ptr_execute_normal_define_directive(token_pointer,(struct token_normal_define_directive*)token);
				return 1;
			case PKW_FUNCTIONLIKE_DEFINE:
				token_ptr_execute_functionlike_define_directive(token_pointer,(struct token_functionlike_define_directive*)token);
				return 1;
			case PKW_UNDEF:
				token_ptr_execute_undef_directive(token_pointer,(struct token_undef_directive*)token);
				return 1;
			case PKW_LINE:
				token_ptr_execute_line_directive(token_pointer,(struct token_line_directive*)token);
				return 1;
			case PKW_ERROR:
				token_ptr_execute_error_directive(token_pointer,(struct token_error_directive*)token);
				return 1;
			case PKW_PRAGMA:
				token_ptr_execute_pragma_directive(token_pointer,(struct token_pragma_directive*)token);
				return 1;
			case PKW_FILE_MACRO:
				token_ptr_execute_file_special_macro(token_pointer,token);
				return 1;/*NOTICE*/
			case PKW_LINE_MACRO:
				token_ptr_execute_line_special_macro(token_pointer,token);
				return 1;/*NOTICE*/
			case PKW_STDC_MACRO:
				token_ptr_execute_stdc_special_macro(token_pointer,token);
				return 1;/*NOTICE*/
			case PKW_STDC_HOSTED_MACRO:
				token_ptr_execute_stdc_hosted_special_macro(token_pointer,token);
				return 1;/*NOTICE*/
			case PKW_STDC_VERSION_MACRO:
				token_ptr_execute_stdc_version_special_macro(token_pointer,token);
				return 1;/*NOTICE*/
			case PKW_DEFINED:
				/* 'defined' is only an operator inside #if/#elif;
				 * elsewhere it stays a normal token. */
				if(token_pointer->is_in_conditional_directive)
				{
					token_ptr_execute_defined_unary_operator(token_pointer,(struct token_defined_unary_operator*)token);
				}
				return 0;
			case LT_EOF:
				/* End of a nested context vs. end of all input. */
				if(token_pointer->call_stack->size>0)
				{
					delete_token_ptr_context(token_pointer->context);
					return 1;
				}else
				{
					token_pointer->context=NULL;
					return 1;
				}
				break;
			case LT_ERROR:
				/*erronous token*/
				push_message_struct(token_pointer->program,((struct token_error*)token)->error);
				token_ptr_goto_next_token(token_pointer);
				return 1;
			case KW_ID:
				{
					struct token_identifier *hold_id_token;
					hold_id_token=(struct token_identifier*)token;
					if(	id_is_a_macro(
								hold_id_token->id,
								token_pointer->program->current_translation_unit_number
							)	
						&&
						token_pointer->state!=TOKEN_POINTER_STATE_DONT_EXPAND_MACROS)
					{
						token_ptr_execute_macro(token_pointer,hold_id_token->id);
						return 1;
					}else if(token_pointer->is_in_conditional_directive)
					{
						token_ptr_goto_next_token(token_pointer);
						/*all undefined id tokens in control expression are replaced with 0*/
						Queue_Push(token_pointer->context->ungeted_tokens,get_constant_token(
								KW_DECIMAL_CONSTANT,
								hold_id_token->delta->location,
								hold_id_token->delta->location,
								"0",
								1));
						return 0;
					}else
					{
						return 0; /*NOTICE*/
					}
					break;
				}
			/* Keywords can be #defined too — treat them like ids. */
			case KW_AUTO:
			case KW_DO:
			case KW_DOUBLE:
			case KW_INT:
			case KW_STRUCT:
			case KW_BREAK:
			case KW_ELSE:
			case KW_LONG:
			case KW_SWITCH:
			case KW_CASE:
			case KW_ENUM:
			case KW_REGISTER:
			case KW_TYPEDEF:
			case KW_CHAR:
			case KW_EXTERN:
			case KW_RETURN:
			case KW_UNION:
			case KW_CONST:
			case KW_FLOAT:
			case KW_SHORT:
			case KW_UNSIGNED:
			case KW_CONTINUE:
			case KW_FOR:
			case KW_SIGNED:
			case KW_VOID:
			case KW_DEFAULT:
			case KW_GOTO:
			case KW_SIZEOF:
			case KW_VOLATILE:
			case KW_IF:
			case KW_STATIC:
			case KW_WHILE:
			case KW_INLINE:
			case KW_RESTRICT:
			case KW_BOOL:
			case KW_COMPLEX:
			case KW_IMAGINARY:
				{
					struct token_keyword *hold_kw_token;
					hold_kw_token=(struct token_keyword*)token;
					if(	id_is_a_macro(
								hold_kw_token->id,
								token_pointer->program->current_translation_unit_number
							)	
						&&
						token_pointer->state!=TOKEN_POINTER_STATE_DONT_EXPAND_MACROS
					  )
					{
						token_ptr_execute_macro(token_pointer,hold_kw_token->id);
						return 1;
					}else if(token_pointer->is_in_conditional_directive)
					{

						token_ptr_goto_next_token(token_pointer);
						/*all undefined id tokens in control expression are replaced with 0*/
						Queue_Push(token_pointer->context->ungeted_tokens,get_constant_token(
								KW_DECIMAL_CONSTANT,
								hold_kw_token->delta->location,
								hold_kw_token->delta->location,
								"0",
								1));
						return 0;
					}else
					{
						return 0; /*NOTICE*/
					}
					break;
				}
			case PKW_MACRO_ARGUMENT:
				token_ptr_execute_functionlike_macro_argument(token_pointer,((struct token_functionlike_define_argument*)token)->argument);
				return 1;
			case PKW_HASHTAG_UNARY_OP:
				token_ptr_execute_stringify_functionlike_macro_argument(token_pointer,((struct token_hashtag_unary_operator*)token));
				return 1;
			case PKW_HASHTAG_HASHTAG_OP:
				token_ptr_execute_concat_functionlike_macro_arguments(token_pointer,((struct token_hashtag_hastag_operator*)token));
				return 1;
			default:
				return 0;
		}
}
/* Expand __STDC_HOSTED__: always the decimal constant 1. */
void token_ptr_execute_stdc_hosted_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
	struct token_constant *one;

	token_ptr_goto_next_token(ptr);

	one=(struct token_constant*)get_constant_token(
			KW_DECIMAL_CONSTANT,
			directive->delta->location,
			directive->delta->location,
			"1",
			1);

	Queue_Push(ptr->context->ungeted_tokens,one);
}
/* Expand __STDC_VERSION__: the C99 value 199901L (as a long decimal
 * constant token). */
void token_ptr_execute_stdc_version_special_macro(struct Token_Pointer *ptr,struct token *directive)
{
	struct token_constant *version;

	token_ptr_goto_next_token(ptr);

	version=(struct token_constant*)get_constant_token(
			KW_LONG_DECIMAL_CONSTANT,
			directive->delta->location,
			directive->delta->location,
			"199901",
			sizeof("199901")-1);

	Queue_Push(ptr->context->ungeted_tokens,version);
}
/* Free only the Token_Pointer structure itself.
 * NOTE(review): ptr->call_stack and ptr->context are not released here —
 * confirm whether they are owned elsewhere or this is a leak. */
void delete_token_ptr(struct Token_Pointer *ptr)
{
	wonky_free(ptr);
}
/* Build a context that iterates a token queue starting at 'start'.
 * Queue nodes are linked via ->prev; line/column bookkeeping starts at
 * 0:0 in the first token's file and is advanced by
 * token_ptr_assume_location_of_token().
 * Fixed: the 'barrier' member was never initialized even though it is
 * read by token_ptr_has_remaining_tokens() and asserted in
 * token_ptr_pop_context() — a fresh context could carry a garbage
 * barrier value.  (Also removed dead commented-out code.) */
struct Token_Pointer_Context* get_token_ptr_context(struct Queue_Node *start,size_t number_of_remaining_tokens,_Bool is_file_inclusion)
{
	struct Token_Pointer_Context *ret;
	struct Source_Location *hold_location;

	ret=wonky_malloc(sizeof(struct Token_Pointer_Context));

	ret->current_token_node=start;
	ret->number_of_remaining_tokens=number_of_remaining_tokens;

	ret->ungeted_tokens=wonky_malloc(sizeof(struct Queue));
	hold_location=((struct token*)start->data)->delta->location;

	ret->line=0;
	ret->column=0;
	ret->filename=hold_location->src->src_name->name;
	ret->filename_size=hold_location->src->src_name->name_size;
	ret->executed_macro_id=NULL;
	ret->has_saved_functionlike_macro_state=0;
	ret->functionlike_macro_arguments_save_state=NULL;
	ret->saved_macro=NULL;
	ret->is_file_inclusion=is_file_inclusion;
	ret->barrier=0;


	Queue_Init(ret->ungeted_tokens);
	return ret;
}
void token_ptr_pop_context(struct Token_Pointer *ptr)
{
	/* Discard the current context and resume the one below it on the call
	   stack. Must not be called on a context whose barrier is set — the
	   unwind loop in token_ptr_has_remaining_tokens also honours this. */
	wonky_assert(ptr && ptr->call_stack && ptr->call_stack->size && ptr->call_stack->first->data);
	wonky_assert(ptr->context && !ptr->context->barrier);

	if(ptr->context->executed_macro_id)
		ptr->context->executed_macro_id->was_already_expanded_as_a_macro=0;
	/*
	   Leaving the context that expanded this macro id: clear the
	   was_already_expanded_as_a_macro guard so the id can be expanded again
	   later (presumably the flag was set while this context was active to
	   prevent recursive self-expansion — confirm against the macro-expansion
	   code). NOTE(review): the original comment here said "reseting to 1"
	   although the code stores 0; the code is taken as authoritative.
	 */
	if(ptr->context->has_saved_functionlike_macro_state)
	{
		/* restore saved functionlike-macro state before the context is freed */
		token_ptr_load_functionlike_macro_state_from_context(ptr->context);
	}

	delete_token_ptr_context(ptr->context);
	ptr->context=Stack_Pop(ptr->call_stack);
}
/*
   Report whether the token pointer can still yield tokens.
   Side effect: pops every exhausted, non-barrier context off the call stack
   before answering, so the current context afterwards is either usable or
   pinned by a barrier.
 */
_Bool token_ptr_has_remaining_tokens(struct Token_Pointer *ptr)
{
	if(ptr->state==TOKEN_POINTER_STATE_ERROR)
		return 0;

	if(token_ptr_has_buffered_tokens(ptr))
		return 1;

	/* unwind finished contexts; a set barrier stops the unwind */
	while(!token_ptr_has_remaining_tokens_in_current_context(ptr)
			&& ptr->call_stack->size>0
			&& !ptr->context->barrier)
	{
		token_ptr_pop_context(ptr);
	}

	return token_ptr_has_remaining_tokens_in_current_context(ptr);
}
_Bool token_ptr_has_remaining_tokens_in_current_context(struct Token_Pointer *ptr)
{
	wonky_assert(ptr->context!=NULL);
	return ptr->context->number_of_remaining_tokens!=0 || ptr->context->ungeted_tokens->size!=0;

}
/* Hand a token back to the current context. Buffered tokens are served
   again in push order (the unget buffer is a queue, not a stack). */
void token_ptr_unget_token(struct Token_Pointer *ptr,struct token *token)
{
	struct Queue *unget_buffer;

	unget_buffer=ptr->context->ungeted_tokens;
	Queue_Push(unget_buffer,token);
}
void token_ptr_assume_location_of_token(struct Token_Pointer *ptr,struct token *token)
{
	wonky_assert(token!=NULL);
	ptr->context->line+=token->delta->line_offset;
	ptr->context->column=token->delta->column;
}
/* True when the current context's unget buffer holds at least one token. */
_Bool token_ptr_has_buffered_tokens(struct Token_Pointer *ptr)
{
	return ptr->context->ungeted_tokens->size>0;
}
/* Pop and return the oldest token from the current context's unget buffer,
   or NULL when the buffer is empty. */
struct token* token_ptr_get_buffered_token(struct Token_Pointer *ptr)
{
	if(!token_ptr_has_buffered_tokens(ptr))
		return NULL;

	return (struct token*)Queue_Pop(ptr->context->ungeted_tokens);
}
struct Queue_Node* token_ptr_get_current_queue_node(struct Token_Pointer *ptr)
{
	return ptr->context->current_token_node;
}
/* Peek at the oldest buffered token without removing it from the unget
   buffer; NULL when nothing is buffered. */
struct token* token_ptr_check_buffered_token(struct Token_Pointer *ptr)
{
	if(!token_ptr_has_buffered_tokens(ptr))
		return NULL;

	return (struct token*)ptr->context->ungeted_tokens->first->data;
}
void token_ptr_set_barrier(struct Token_Pointer *ptr)
{
	wonky_assert(ptr && ptr->context && !ptr->context->barrier);
	ptr->context->barrier=1;
}
void token_ptr_clear_barrier(struct Token_Pointer *ptr)
{
	wonky_assert(ptr && ptr->context && ptr->context->barrier);
	ptr->context->barrier=0;
}
/*
   Push the current context and start reading from 'where_to' (a new context
   with 'number_of_remaining_tokens' tokens). A NULL target or zero token
   count is a silent no-op. Depth is bounded to guard against runaway macro
   recursion / include nesting; exceeding it records a token-pointer error.
 */
void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens,_Bool is_file_inclusion)
{
	/* named limit instead of a magic 1000 scattered through the jump functions */
	enum { WONKY_TOKEN_POINTER_MAX_CALL_DEPTH = 1000 };
	struct Token_Pointer_Context *new_context;

	if(where_to==NULL || number_of_remaining_tokens==0)
		return;

	if(ptr->call_stack->size>WONKY_TOKEN_POINTER_MAX_CALL_DEPTH)
	{
		push_token_pointer_error(ptr,"Preprocessing bounds exceeded");
		return;
	}

	new_context=get_token_ptr_context(where_to,number_of_remaining_tokens,is_file_inclusion);
	Stack_Push(ptr->call_stack,ptr->context);
	ptr->context=new_context;
}
/*
   Push the current context and start reading the substitution tokens of a
   functionlike-macro argument. The saved functionlike-macro state is stored
   into the new context so token_ptr_pop_context can restore it. An argument
   with no substitution tokens is a silent no-op. Depth is bounded the same
   way as in token_ptr_jump_to.
 */
void token_ptr_jump_to_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *arg)
{
	/* named limit instead of a magic 1000 (kept in sync with token_ptr_jump_to) */
	enum { WONKY_TOKEN_POINTER_MAX_CALL_DEPTH = 1000 };
	struct Token_Pointer_Context *new_context;

	if(arg->first_in_argument_substitution_tokens==NULL || arg->number_of_substitution_tokens==0)
		return;

	if(ptr->call_stack->size>WONKY_TOKEN_POINTER_MAX_CALL_DEPTH)
	{
		push_token_pointer_error(ptr,"Preprocessing bounds exceeded");
		return;
	}

	new_context=get_token_ptr_context(arg->first_in_argument_substitution_tokens,arg->number_of_substitution_tokens,0);
	token_ptr_store_functionlike_macro_state_into_context(new_context,arg->belongs_to->define);
	Stack_Push(ptr->call_stack,ptr->context);
	ptr->context=new_context;
}
/* Convenience wrapper: jump to the head of 'queue', reading all of its
   tokens. Delegates every check (NULL head, empty queue) to token_ptr_jump_to. */
void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue,_Bool is_file_inclusion)
{
	struct Queue_Node *head;

	head=queue->first;
	token_ptr_jump_to(ptr,head,queue->size,is_file_inclusion);
}
void delete_token_ptr_context(struct Token_Pointer_Context *context)
{
	/* Frees only the context struct. The ungeted_tokens queue allocated in
	   get_token_ptr_context (and any tokens still buffered in it) is not
	   released here — NOTE(review): assumed to be reclaimed by the wonky
	   allocator's own bookkeeping; confirm to rule out a leak. */
	wonky_free(context);
}
#endif