#ifndef WONKY_LEX_PREPROCESSING_DIRECTIVE_C
#define WONKY_LEX_PREPROCESSING_DIRECTIVE_C WONKY_LEX_PREPROCESSING_DIRECTIVE_C
#include <lex_preprocessing_directive.h>
struct token* preprocessing_lex_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*Lex one preprocessing directive starting at 'where'.
	 *An empty directive line is reported through an error token rather than NULL.*/
	struct token *directive_token;
	directive_token=preprocessing_extract_next_directive(lexer_data);
	if(directive_token!=NULL)
		return directive_token;
	return get_error_token("PREPROCESSING EMPTY DIRECTIVE NOT SUPPORTED",where,lexer_data->previous_token_location,lexer_data->program);
}
struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
struct token_defined_unary_operator *ret;
struct token *hold_token;
ret=wonky_malloc(sizeof(struct token_defined_unary_operator));
ret->type=PKW_DEFINED;
ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
hold_token=preprocessing_extract_next_token(lexer_data);
if(hold_token==NULL)
{
push_generic_error(lexer_data->program,"Expected id in defined unary operator");
}else
{
if(token_is_identifier_in_preprocessing(hold_token))
{
ret->id=hold_token;
}else if(hold_token->type==KW_OPEN_NORMAL)
{
hold_token=preprocessing_extract_next_token(lexer_data);
if(token_is_identifier_in_preprocessing(hold_token))
{
ret->id=hold_token;
hold_token=preprocessing_extract_next_token(lexer_data);
if(hold_token->type!=KW_CLOSE_NORMAL)
push_generic_error(lexer_data->program,"Expected ')' in defined unary operator after id");
}else
{
push_generic_error(lexer_data->program,"Expected id in defined unary operator after '('");
}
}else
{
push_generic_error(lexer_data->program,"Expected id in defined unary operator");
}
}
return (struct token*)ret;
}
struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
	/*Lexes the '#' (stringify) unary operator inside a functionlike macro
	 *replacement list. Its operand must be a macro argument token; otherwise
	 *an error is pushed and the operand is left NULL.*/
	struct token_hashtag_unary_operator *ret;
	struct token *hold_token=NULL; /*fix: was read uninitialised when the line ended right after '#'*/
	ret=wonky_malloc(sizeof(struct token_hashtag_unary_operator));
	ret->type=PKW_HASHTAG_UNARY_OP;
	ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
	if(!preprocessing_eol(lexer_data))
		hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
	if(hold_token==NULL || hold_token->type!=PKW_MACRO_ARGUMENT)
	{
		push_generic_error(lexer_data->program,"Expected macro argument after #");
		ret->operand=NULL;
		return (struct token*)ret;
	}
	ret->operand=(struct token_functionlike_define_argument*)hold_token;
	return (struct token*)ret;
}
/*
 * Lexes a chain of '##' (token paste) operators inside a functionlike macro
 * replacement list. The caller has already consumed the first '##' and
 * supplies the token to its left as previous_token. All pasted operands are
 * collected into a queue and wrapped into a single hashtag-hashtag token.
 */
struct token* preprocessing_lex_hastag_hashtag_operator_in_functionlike_macro(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token)
{
struct token *hold_token;
struct Queue *ret_q;
ret_q=wonky_malloc(sizeof(struct Queue));
Queue_Init(ret_q);
/*the left operand of the first '##' goes in first*/
Queue_Push(ret_q,previous_token);
if(preprocessing_eol(lexer_data))
{
/*'##' may not end a replacement list; fall back to returning the left operand alone*/
push_generic_error(lexer_data->program,"Expected something after ##");
return (struct token*)previous_token;
}else
{
/*the first '##' has been eaten by the caller*/
do{
/*first pass is not eol*/
hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
Queue_Push(ret_q,hold_token);
}while(preprocessing_get_and_check_token(lexer_data,KW_HASHTAG_HASHTAG) && !preprocessing_eol(lexer_data));
/*NOTE(review): previous_token is pushed a second time here even though it was
 *already queued above — this looks like it should push nothing (or the last
 *hold_token); confirm the intended paste semantics before relying on it*/
if(!preprocessing_eol(lexer_data))
Queue_Push(ret_q,previous_token);
return (struct token*)get_hashtag_hashtag_operator(where,lexer_data->previous_token_location,ret_q);
}
}
struct token* preprocessing_extract_next_directive(struct Lexer_Data *lexer_data)
{
	/*Extract the next token with directive keywords (#if, #define, ...) recognised.*/
	return preprocessing_extract_next_token_inner(lexer_data,/*extract_directive=*/1,/*extract_defined_statement=*/0);
}
struct token* preprocessing_extract_next_token_in_iflike_directive_control_statement(struct Lexer_Data *lexer_data)
{
	/*Extract the next token inside an #if/#elif controlling expression,
	 *where the 'defined' unary operator must be recognised.*/
	return preprocessing_extract_next_token_inner(lexer_data,/*extract_directive=*/0,/*extract_defined_statement=*/1);
}
struct token* preprocessing_extract_next_token(struct Lexer_Data *lexer_data)
{
	/*Extract a plain preprocessing token: no directive keywords, no 'defined'.*/
	return preprocessing_extract_next_token_inner(lexer_data,/*extract_directive=*/0,/*extract_defined_statement=*/0);
}
/*
 * Core extractor used by the three wrappers above.
 * Returns the buffered look-ahead token if one exists, otherwise skips white
 * space, runs the automata on the following characters and materialises a
 * token. Returns NULL at end of the preprocessing line.
 */
struct token* preprocessing_extract_next_token_inner(struct Lexer_Data *lexer_data,_Bool extract_directive,_Bool extract_defined_statement)
{
struct token *ret;
struct Automata_Node *hold_node;
size_t where_does_the_token_start_in_the_source_file;
/*a token pushed back by preprocessing_get_and_check_token takes priority*/
if(lexer_data->buffer_token!=NULL)
{
ret=lexer_data->buffer_token;
lexer_data->buffer_token=NULL;
return ret;
}
preprocessing_skip_white_space(lexer_data);
where_does_the_token_start_in_the_source_file=lexer_data->where_in_src;
if(preprocessing_eol(lexer_data))
return NULL;
hold_node=preprocessing_feed_automata_until_error(lexer_data);
/*NOTE(review): get_source_location is called here with (column,row,...) while
 *preprocessing_make_token_finishing_on_node passes (row,column,...) — one of
 *the two call sites is probably swapped; confirm against the prototype*/
if(hold_node==NULL)
return get_error_token("Unrecognised lexical element",get_source_location(
lexer_data->which_column,
lexer_data->which_row,
where_does_the_token_start_in_the_source_file,
lexer_data->where_in_src-where_does_the_token_start_in_the_source_file,
lexer_data->src
),
lexer_data->previous_token_location,
lexer_data->program);
ret=preprocessing_make_token_finishing_on_node(lexer_data, hold_node, where_does_the_token_start_in_the_source_file,extract_directive,extract_defined_statement);
lexer_data->is_in_the_begining_of_line=0;
return ret;
}
_Bool preprocessing_get_and_check_token(struct Lexer_Data *lexer_data,enum LEXER_TYPE token_type)
{
	/*Peek at the next token on the line. If its type matches token_type the
	 *token is consumed and 1 is returned; otherwise it stays buffered for the
	 *next extraction and 0 is returned.*/
	if(preprocessing_eol(lexer_data))
		return 0;
	if(lexer_data->buffer_token==NULL)
		lexer_data->buffer_token=preprocessing_extract_next_token(lexer_data);
	if(lexer_data->buffer_token==NULL || lexer_data->buffer_token->type!=token_type)
		return 0;
	/*match: drop the buffered token so it is not handed out again*/
	lexer_data->buffer_token=NULL;
	return 1;
}
struct Automata_Node* preprocessing_feed_automata_until_error(struct Lexer_Data *lexer_data)
{
	/*Feed characters into the chonky automata until no transition exists and
	 *return the last node reached (NULL only if the very first step fails).*/
	struct Automata_Node *current=&chonky[0];
	struct Automata_Node *last_reached=NULL;
	while(current!=NULL)
	{
		last_reached=current;
		current=preprocessing_feed_automata_next_char(lexer_data,current);
	}
	return last_reached;
}
struct Automata_Node *preprocessing_feed_automata_next_char(struct Lexer_Data *lexer_data,struct Automata_Node *node)
{
	/*Consume one source character (line splices and comments already folded
	 *away) and advance the automata. When no transition exists the lexer
	 *position is rolled back and NULL is returned.*/
	size_t saved_where_in_src=lexer_data->where_in_src;
	size_t saved_column=lexer_data->which_column;
	size_t saved_row=lexer_data->which_row;
	struct Automata_Node *next;
	enum Source_Chars ch;
	ch=lexer_get_ch_accounting_for_linesplices_and_comments(lexer_data->program,lexer_data->src->src,
			lexer_data->src->src_size,
			&lexer_data->where_in_src,
			&lexer_data->which_row,
			&lexer_data->which_column);
	if(ch==CHAR_FORM_FEED_TAB)
	{
		/*end of preprocessing line stops the automata*/
		next=NULL;
	}else if(node->delta[ch]==id_node)
	{
		/*identifiers grow their own node chain*/
		next=get_new_id_node(node,ch);
	}else
	{
		next=node->delta[ch];
	}
	if(next!=NULL)
		return next;
	/*no transition: restore the lexer to where it was before this character*/
	lexer_data->where_in_src=saved_where_in_src;
	lexer_data->which_column=saved_column;
	lexer_data->which_row=saved_row;
	return NULL;
}
void preprocessing_skip_white_space(struct Lexer_Data *lexer_data)
{
	/*Advance the lexer past horizontal white space on the current line,
	 *delegating to the reusable inner routine.*/
	preprocessing_skip_white_space_inner(
			lexer_data->program,
			lexer_data->src->src,
			lexer_data->src->src_size,
			&lexer_data->where_in_src,
			&lexer_data->which_row,
			&lexer_data->which_column);
}
/*
 * Skips spaces, vertical tabs and horizontal tabs starting at *where_in_src,
 * writing the advanced position/line/column back through the out-parameters.
 * The loop works on local copies and commits them to the out-parameters only
 * in the body, i.e. only while the last character read was still white space;
 * the character that ends the run is therefore NOT consumed. The first
 * iteration uses CHAR_SPACE as a sentinel so the body runs at least once.
 */
void preprocessing_skip_white_space_inner(struct Program *program,char *src,size_t src_size,size_t *where_in_src,size_t *which_line,size_t *which_col)
{
size_t wh_in_src=*where_in_src;
size_t wh_line=*which_line;
size_t wh_col=*which_col;
for(enum Source_Chars ch=CHAR_SPACE;
(ch==CHAR_SPACE || ch==CHAR_VERTICAL_TAB || ch==CHAR_HORISONTAL_TAB) && wh_in_src<src_size;
ch=lexer_get_ch_accounting_for_linesplices_and_comments(
program,
src,
src_size,
&wh_in_src,
&wh_line,
&wh_col)
)
{
/*commit the position reached before the next read, so a non-white-space
 *character read by the update expression is left unconsumed*/
*where_in_src=wh_in_src;
*which_line=wh_line;
*which_col=wh_col;
}
}
_Bool preprocessing_eol(struct Lexer_Data *lexer_data)
{
char *src=lexer_data->src->src;
size_t src_size=lexer_data->src->src_size;
size_t where_in_src;
size_t line;
size_t column;
if(lexer_data->buffer_token)
{
return 0;
} else
{
where_in_src=lexer_data->where_in_src;
line=lexer_data->which_row;
column=lexer_data->which_column;
}
preprocessing_skip_white_space_inner(lexer_data->program,src,src_size,&where_in_src,&line,&column);
return lexer_get_ch_accounting_for_linesplices_and_comments(lexer_data->program,src,src_size,&where_in_src,&line,&column)==CHAR_FORM_FEED_TAB;
}
/*
 * Turns the automata node the lexer stopped on into a token.
 * When create_directive is set, directive keywords dispatch to the dedicated
 * per-directive lexers; #elif/#else/#endif are only legal while inside an #if
 * body. When create_defined_statement is set, the 'defined' keyword becomes a
 * defined-unary-operator token. Everything else falls through to the generic
 * lexer_make_token_finishing_on_node.
 */
struct token *preprocessing_make_token_finishing_on_node(struct Lexer_Data *lexer_data,struct Automata_Node *finishing_node,size_t start_position,_Bool create_directive,_Bool create_defined_statement)
{
struct Source_Location *token_location;
wonky_assert(lexer_data->where_in_src > start_position);
wonky_assert(is_valid_automata_node(finishing_node));
/*NOTE(review): arguments here are (row,column,...) but the call in
 *preprocessing_extract_next_token_inner passes (column,row,...) — one of the
 *two orders must be wrong; confirm against get_source_location's prototype*/
token_location=get_source_location(
lexer_data->which_row,
lexer_data->which_column,
start_position,
lexer_data->where_in_src-start_position,
lexer_data->src
);
if(create_directive)
{
switch(finishing_node->preprocessing_keyword)
{
case PKW_IF:
return preprocessing_lex_if_directive(lexer_data,token_location,PKW_IF);
case PKW_IFDEF:
return preprocessing_lex_ifdef_directive(lexer_data,token_location);
case PKW_IFNDEF:
return preprocessing_lex_ifndef_directive(lexer_data,token_location);
case PKW_ELIF:
/*#elif is only meaningful while lexing an #if body*/
if(lexer_data->is_in_if_directive_body)
{
return preprocessing_lex_if_directive(lexer_data,token_location,PKW_ELIF);
}
else
return get_error_token("Stray #elif not participating in any #if",token_location,lexer_data->previous_token_location,lexer_data->program);
case PKW_ELSE:
if(lexer_data->is_in_if_directive_body)
return preprocessing_return_else_token(lexer_data,token_location);
else
return get_error_token("Stray #else not participating in any #if",token_location,lexer_data->previous_token_location,lexer_data->program);
case PKW_ENDIF:
if(lexer_data->is_in_if_directive_body)
return preprocessing_return_endif_token(lexer_data,token_location);
else
return get_error_token("Stray #endif not participating in any #if",token_location,lexer_data->previous_token_location,lexer_data->program);
case PKW_INCLUDE:
return preprocessing_lex_include_directive(lexer_data,token_location);
case PKW_DEFINE:
return preprocessing_lex_define_directive(lexer_data,token_location);
case PKW_UNDEF:
return preprocessing_lex_undef_directive(lexer_data,token_location);
case PKW_LINE:
return preprocessing_lex_line_directive(lexer_data,token_location);
case PKW_ERROR:
return preprocessing_lex_error_directive(lexer_data,token_location);
case PKW_PRAGMA:
return get_error_token("PREPROCESSING PRAGMA NOT DONE",token_location,lexer_data->previous_token_location,lexer_data->program);
default:
/*not a directive keyword after all: make an ordinary token*/
return lexer_make_token_finishing_on_node(lexer_data,finishing_node,start_position);
}
wonky_assert(SHOULD_NOT_REACH_HERE);
}else if(create_defined_statement && finishing_node->preprocessing_keyword==PKW_DEFINED)
{
return preprocessing_lex_defined_unary_operator(lexer_data,token_location);
}else
{
return lexer_make_token_finishing_on_node(lexer_data,finishing_node,start_position);
}
}
struct token* preprocessing_lex_line_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*Lexes '#line digit-sequence ["filename"]'. The digit sequence is
	 *mandatory; the filename string is optional. On malformed input a generic
	 *error is pushed and the token is returned with line 0.*/
	struct token_line_directive *ret;
	struct token_constant *hold_token_constant;
	struct token_string *hold_token_string;
	ret=wonky_malloc(sizeof(struct token_line_directive));
	ret->type=PKW_LINE;
	ret->directive_delta=get_source_location_delta(lexer_data->previous_token_location,where);
	ret->filename=NULL;
	if(preprocessing_eol(lexer_data))
	{
		push_generic_error(lexer_data->program,"Expected a digit sequence in the #line directive");
		ret->line=0;
		return (struct token*)ret;
	}
	hold_token_constant=(struct token_constant*)preprocessing_extract_next_token(lexer_data);
	/*fix: guard against a NULL extraction before dereferencing*/
	if(hold_token_constant==NULL || hold_token_constant->type!=KW_CONSTANT || !type_is_a_plain_signed_int(hold_token_constant->constant->type))
	{
		push_generic_error(lexer_data->program,"Expected a digit sequence in the #line directive");
		ret->line=0;
		return (struct token*)ret;
	}
	/*TODO make it not host type specific*/
	ret->line=*(int*)hold_token_constant->constant->value;
	wonky_free(hold_token_constant);
	if(!preprocessing_eol(lexer_data))
	{
		hold_token_string=(struct token_string*)preprocessing_extract_next_token(lexer_data);
		/*fix: guard against NULL, and report the right thing — the message
		 *here used to be a copy-paste of the digit-sequence error*/
		if(hold_token_string==NULL || hold_token_string->type!=KW_STRING || !type_is_a_normal_string(hold_token_string->constant->type))
		{
			push_generic_error(lexer_data->program,"Expected a filename string in the #line directive");
			ret->line=0;
			return (struct token*)ret;
		}
		/*TODO make it not host type specific*/
		ret->filename=(char*)hold_token_string->constant->value;
		ret->filename_size=get_type_size(hold_token_string->constant->type);
	}
	return (struct token*)ret;
}
/* pp[opt] nl */
struct token* preprocessing_lex_error_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
/*Lexes '#error ...': the raw text up to the end of the line is appended
 *verbatim to a freshly created wonky error message.*/
struct token_error_directive *ret;
size_t hold_start_location;
size_t hold_error_size;
struct wonky_stream string_stream;
/*remember where the message text starts, then swallow the rest of the line*/
hold_start_location=lexer_data->where_in_src;
preprocessing_goto_end_of_line(lexer_data);
ret=wonky_malloc(sizeof(struct token_error_directive));
ret->type=PKW_ERROR;
ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
hold_error_size=lexer_data->where_in_src-hold_start_location;
ret->error_message=get_wonky_message(WONKY_MESSAGE_TYPE_ERROR,WONKY_MESSAGE_SOURCE_TRANSLATION,where->line,where->column,where->src->src_name->name,where->src->src_name->name_size,"#error");
/*append the directive's raw text to the end of the prepared message*/
string_stream=wonky_string_stream(ret->error_message->message);
wonky_fseek(&string_stream,0,SEEK_END);
wonky_write(&string_stream,lexer_data->src->src+hold_start_location,hold_error_size);
wonky_stream_delete(&string_stream);
return (struct token*)ret;
}
struct token* preprocessing_lex_pragma_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
struct token_pragma_directive *ret;
ret->type=PKW_PRAGMA;
ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
ret->pragma_type=PRAGMA_TYPE_END;
preprocessing_goto_end_of_line(lexer_data);
return (struct token*)ret;
}
/*
 * Advances the lexer to the newline ending the current line, honouring line
 * splices: a '\' immediately followed by '\n' continues the logical line.
 * Implemented as a small state machine; stops with the terminating '\n'
 * still unconsumed (state END), or at end of file.
 */
void preprocessing_goto_end_of_line(struct Lexer_Data *lexer_data)
{
enum {
START,
START_OF_POSSIBLE_LINE_SPLICE,
END,
} state;
state=START;
do{
if(lexer_eof(lexer_data))
return ;
switch(state)
{
case START:
if(lexer_data->src->src[lexer_data->where_in_src] == '\\')
{
/*might be the start of a line splice*/
state=START_OF_POSSIBLE_LINE_SPLICE;
++lexer_data->where_in_src;
++lexer_data->which_column;
}else if(lexer_data->src->src[lexer_data->where_in_src]=='\n')
{
/*newline found; leave it unconsumed*/
state=END;
}else
{
state=START;
++lexer_data->where_in_src;
++lexer_data->which_column;
}
break;
case START_OF_POSSIBLE_LINE_SPLICE:
if(lexer_data->src->src[lexer_data->where_in_src] == '\n')
{
/*'\' + '\n' is a splice: the logical line continues on the next physical line*/
state=START;
++lexer_data->where_in_src;
lexer_data->which_column=0;
++lexer_data->which_row;
}else
{
/*lone '\': keep scanning (the '\' itself was already consumed)*/
state=START;
}
break;
default:
wonky_assert(SHOULD_NOT_REACH_HERE);
}
}while(state!=END);
}
struct token* preprocessing_lex_include_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*Lexes '#include ...' by collecting every token up to the end of the
	 *line into a queue; interpretation of <...> vs "..." happens later.*/
	struct token_include_directive *ret;
	struct token *next_token;
	ret=wonky_malloc(sizeof(struct token_include_directive));
	ret->type=PKW_INCLUDE;
	ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
	ret->tokens=wonky_malloc(sizeof(struct Queue));
	Queue_Init(ret->tokens);
#warning a hashtag include "asdfadf" [ space ] makes wonky get corrupted ... ...
	while(!preprocessing_eol(lexer_data))
	{
		next_token=preprocessing_extract_next_token(lexer_data);
		if(next_token!=NULL)
			Queue_Push(ret->tokens,next_token);
	}
	return (struct token*)ret;
}
struct token* preprocessing_lex_if_directive(struct Lexer_Data *lexer_data,struct Source_Location *where,enum LEXER_TYPE if_type)
{
	/*Lexes an #if or #elif: first the controlling expression up to the end of
	 *the line, then both branch bodies up to the matching #elif/#else/#endif.*/
	struct token_if_directive *ret;
	wonky_assert(if_type==PKW_IF || if_type==PKW_ELIF);
	ret=wonky_malloc(sizeof(struct token_if_directive));
	ret->type=if_type;
	ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
	ret->controlling_expression=wonky_malloc(sizeof(struct Queue));
	Queue_Init(ret->controlling_expression);
	ret->if_true=wonky_malloc(sizeof(struct Queue));
	Queue_Init(ret->if_true);
	ret->if_false=wonky_malloc(sizeof(struct Queue));
	Queue_Init(ret->if_false);
	preprocessing_lex_if_directive_control_statement(lexer_data,ret->controlling_expression);
	preprocessing_lex_finish_iflike_directive(lexer_data,ret->if_true,ret->if_false);
	return (struct token*)ret;
}
void preprocessing_lex_if_directive_control_statement(struct Lexer_Data *lexer_data,struct Queue *control_statement_tokens)
{
	/*Collect the controlling-expression tokens of an #if/#elif, with the
	 *'defined' operator recognised, up to the end of the line.*/
	struct token *next_token;
	while(!preprocessing_eol(lexer_data))
	{
		next_token=preprocessing_extract_next_token_in_iflike_directive_control_statement(lexer_data);
		Queue_Push(control_statement_tokens,next_token);
	}
}
struct token* preprocessing_lex_ifdef_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*'#ifdef id' — the defined-branch-first flavour of the shared lexer.*/
	return preprocessing_lex_ifdefndef_directive(lexer_data,where,PKW_IFDEF);
}
struct token* preprocessing_lex_ifndef_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*'#ifndef id' — the undefined-branch-first flavour of the shared lexer.*/
	return preprocessing_lex_ifdefndef_directive(lexer_data,where,PKW_IFNDEF);
}
struct token* preprocessing_lex_ifdefndef_directive(struct Lexer_Data *lexer_data,struct Source_Location *where,enum LEXER_TYPE type)
{
struct token_ifdefndef_directive *ret;
struct token *hold_token;
wonky_assert(type==PKW_IFNDEF || type==PKW_IFDEF);
ret=wonky_malloc(sizeof(struct token_ifdefndef_directive));
ret->type=type;
ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
ret->if_defined=wonky_malloc(sizeof(struct Queue));
ret->if_undefined=wonky_malloc(sizeof(struct Queue));
Queue_Init(ret->if_defined);
Queue_Init(ret->if_undefined);
if(!preprocessing_eol(lexer_data))
hold_token=preprocessing_extract_next_token(lexer_data);
else
hold_token==NULL;
if(hold_token==NULL || !token_is_identifier_in_preprocessing(hold_token))
{
push_generic_error(lexer_data->program,"Expected id in #ifdef directive");
ret->id=NULL;
}else
{
ret->id=hold_token;
if(type==PKW_IFDEF)
preprocessing_lex_finish_iflike_directive(lexer_data,ret->if_defined,ret->if_undefined);
else
preprocessing_lex_finish_iflike_directive(lexer_data,ret->if_undefined,ret->if_defined);
}
return (struct token*)ret;
}
/*
 * Consumes the body of an #if-like directive after its condition line.
 * Tokens go into if_true until an #elif/#else/#endif is met; an #else routes
 * the remaining tokens into if_false up to the #endif, while an #elif token
 * itself is pushed into if_false so evaluation can continue from it later.
 * Saves and restores the lexer's is_in_if_directive_body flag to support
 * nesting.
 */
void preprocessing_lex_finish_iflike_directive(struct Lexer_Data *lexer_data,struct Queue *if_true,struct Queue *if_false)
{
struct token *hold_token;
/*NOTE(review): ret is never used in this function*/
struct token_if_directive *ret;
_Bool hold_lexerdata_state;
hold_lexerdata_state=lexer_data->is_in_if_directive_body; /*=(*/
lexer_data->is_in_if_directive_body=1;
/* Translated note from the author: you could pass the token type here and
 * parse according to it. The parsing is not terribly complicated so it can be
 * done linearly. (It is more optimal with respect to the stack anyway :P
 * NEW: better yet, don't pass the type at all — just parse everything in one
 * function!)
 * */
while((hold_token=lexer_extract_next_token(lexer_data))!=NULL)
{
if(hold_token->type==PKW_ELSE)
{
/*everything from #else to #endif belongs to the false branch*/
while(!lexer_check(lexer_data,PKW_ENDIF) && (hold_token=lexer_extract_next_token(lexer_data))!=NULL)
{
Queue_Push(if_false,hold_token);
}
if(lexer_check(lexer_data,PKW_ENDIF))
{
hold_token=lexer_extract_next_token(lexer_data);
}else if(lexer_eof(lexer_data))
{
push_generic_error(lexer_data->program,"Reached end of file before reaching a #endif");
}
/*the #endif (or trailing) token itself is discarded*/
if(hold_token)
delete_token(hold_token);
break;
}else if(hold_token->type==PKW_ELIF)
{
/*keep the #elif token so the false branch can be re-evaluated from it*/
Queue_Push(if_false,hold_token);
break;
}else if(hold_token->type==PKW_ENDIF)
{
break;
}else
{
Queue_Push(if_true,hold_token);
}
}
lexer_data->is_in_if_directive_body=hold_lexerdata_state;
}
struct token* preprocessing_lex_define_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*Lexes '#define id ...'. The token after the id decides the flavour:
	 *NULL -> empty objectlike define, '(' -> functionlike define, anything
	 *else -> objectlike define whose replacement list starts with that token.*/
	struct token *hold_token;
	struct token *hold_hold_token;
	struct identifier *hold_id;
	struct token *ret;
	hold_token=preprocessing_extract_next_token(lexer_data);
	if(hold_token==NULL)
	{
		preprocessing_goto_end_of_line(lexer_data);
		/*fix: no token to delete here — the old code called delete_token(NULL)*/
		return get_error_token("Id expected after #define directive",where,lexer_data->previous_token_location,lexer_data->program);
	}else if(hold_token->type==KW_ID)
	{
		hold_id=((struct token_identifier*)hold_token)->id;
	}else if(token_is_keyword(hold_token))
	{
		hold_id=((struct token_keyword*)hold_token)->id;
	}else
	{
		/*fix: hold_id was used uninitialised when the first token was
		 *neither an identifier nor a keyword*/
		preprocessing_goto_end_of_line(lexer_data);
		delete_token(hold_token);
		return get_error_token("Id expected after #define directive",where,lexer_data->previous_token_location,lexer_data->program);
	}
#warning make lexer_data_check_char(lexer_data,CHAR_OPEN_NORMAL) so we check for '(' directly next to id
	hold_hold_token=preprocessing_extract_next_token(lexer_data);
	if(hold_hold_token==NULL)
	{
		ret=get_normal_define_directive_token(where,lexer_data->previous_token_location,hold_id);
		delete_token(hold_token);
		return ret;
	}else if(hold_hold_token->type==KW_OPEN_NORMAL)
	{
		ret=preprocessing_lex_functionlike_define_directive(lexer_data,where,hold_id);
		delete_token(hold_token);
		delete_token(hold_hold_token);
		return ret;
	}else
	{
		ret=preprocessing_lex_normal_define_directive(lexer_data,where,hold_id,hold_hold_token);
		delete_token(hold_token);
		/*NOTE(review): hold_hold_token was pushed into the replacement list
		 *by preprocessing_lex_normal_define_directive — deleting it here, as
		 *the original code did, looks like a use-after-free hazard; kept for
		 *behaviour parity, confirm ownership of the first replacement token*/
		delete_token(hold_hold_token);
		return ret;
	}
	wonky_assert(SHOULD_NOT_REACH_HERE);
}
/*
* the first replacement token is the one the caller got
* when checking if the define directive is a function like
* on or a normal one.
* TODO: remove comment and make function less obscure
*/
struct token* preprocessing_lex_normal_define_directive(struct Lexer_Data *lexer_data,struct Source_Location *where,struct identifier *id,struct token *first_replacement_token)
{
struct token_normal_define_directive *ret;
ret=(struct token_normal_define_directive*)get_normal_define_directive_token(where,lexer_data->previous_token_location,id);
if(first_replacement_token)
{
Queue_Push(ret->define->replacement_tokens,first_replacement_token);
preprocessing_push_tokens_into_queue_until_eol(lexer_data,ret->define->replacement_tokens);
}
return (struct token*)ret;
}
/*
* [id,]* ) replacement-list
* the first '(' is chomped from caller
* TODO: remove comment and make function less obscure
*/
struct token* preprocessing_lex_functionlike_define_directive(struct Lexer_Data *lexer_data,struct Source_Location *where,struct identifier *id)
{
	/*Lexes a functionlike '#define id(parameter-list) replacement-list'.
	 *The opening '(' was consumed by the caller. The replacement list is
	 *lexed under the preprocessing-directive automata view and the normal
	 *view is restored afterwards.*/
	struct token_functionlike_define_directive *directive_token;
	directive_token=(struct token_functionlike_define_directive*)get_functionlike_define_directive_token(where,lexer_data->previous_token_location,id);
	preprocessing_parse_functionlike_macro_id_list(lexer_data,directive_token);
	lexer_data->automata_view=AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE;
	preprocessing_push_functionlike_macro_substitution_tokens(lexer_data,where,directive_token->define);
	lexer_data->automata_view=AUTOMATA_VIEW_NORMAL;
	return (struct token*)directive_token;
}
struct token* preprocessing_lex_undef_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
	/*Lexes '#undef id'. Special (predefined) macros may not be undefined;
	 *on any error the returned token keeps a NULL id.*/
	struct token_undef_directive *undef_token;
	struct token *id_token;
	undef_token=wonky_malloc(sizeof(struct token_undef_directive));
	undef_token->type=PKW_UNDEF;
	undef_token->delta=get_source_location_delta(lexer_data->previous_token_location,where);
	undef_token->id=NULL;
	id_token=preprocessing_extract_next_token(lexer_data);
	if(id_token==NULL || !token_is_identifier_in_preprocessing(id_token))
		push_generic_error(lexer_data->program,"Expected id in #undef directive");
	else if(token_is_a_special_macro(id_token))
		push_generic_error(lexer_data->program,"Special macro id in #undef directive is not allowed");
	else
		undef_token->id=id_token;
	return (struct token*)undef_token;
}
void preprocessing_push_tokens_into_queue_until_eol(struct Lexer_Data *lexer_data,struct Queue *queue)
{
	/*Drain the rest of the preprocessing line into the given queue.*/
	struct token *next_token;
	while(!preprocessing_eol(lexer_data))
	{
		next_token=preprocessing_extract_next_token(lexer_data);
		Queue_Push(queue,next_token);
	}
}
/*You must not call this when eol*/
struct token* preprocessing_get_token_for_functionlike_macro_substitution_list(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
	/*Fetch one token of a functionlike macro's replacement list.
	 *Identifiers/keywords naming one of this macro's parameters become
	 *argument tokens, '#' starts a stringify operator, everything else is
	 *passed through unchanged. Must not be called at end of line.*/
	struct token *next_token;
	next_token=preprocessing_extract_next_token(lexer_data);
	/*caller guarantees we are not at eol, so extraction cannot return NULL*/
	wonky_assert(next_token!=NULL);
	if(next_token->type==KW_ID && ((struct token_identifier*)next_token)->id->hold_functionlike_define_directive==directive)
		return get_functionlike_define_directive_argument_token(
				where,
				lexer_data->previous_token_location,
				((struct token_identifier*)next_token)->id->hold_functionlike_define_directive_argument);
	if(token_is_keyword(next_token) && ((struct token_keyword*)next_token)->id->hold_functionlike_define_directive==directive)
		return get_functionlike_define_directive_argument_token(
				where,
				lexer_data->previous_token_location,
				((struct token_keyword*)next_token)->id->hold_functionlike_define_directive_argument);
	if(next_token->type==KW_HASHTAG)
		return preprocessing_lex_hastag_unary_operator(lexer_data,where,directive);
	return next_token;
}
/*
 * Collects the replacement list of a functionlike macro into
 * directive->replacement_tokens. Each token is fetched through the
 * substitution-list getter (which handles parameters and '#'); when the next
 * token is '##' the whole paste chain is folded into a single
 * hashtag-hashtag operator token before being queued.
 */
void preprocessing_push_functionlike_macro_substitution_tokens(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
struct token *hold_token;
while(!preprocessing_eol(lexer_data))
{
hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
/*a following '##' means hold_token is the left operand of a paste chain*/
if(preprocessing_get_and_check_token(lexer_data,KW_HASHTAG_HASHTAG))
{
if(preprocessing_eol(lexer_data))
{
push_generic_error(lexer_data->program,"## is the last token in the functionlike macro substitution tokens");
return;
}else
{
hold_token=preprocessing_lex_hastag_hashtag_operator_in_functionlike_macro(lexer_data,where,directive,hold_token);
}
}
Queue_Push(directive->replacement_tokens,hold_token);
}
}
void preprocessing_parse_functionlike_macro_id_list(struct Lexer_Data *lexer_data,struct token_functionlike_define_directive *directive)
{
struct token *hold_token;
while(!preprocessing_eol(lexer_data))
{
hold_token=preprocessing_extract_next_token(lexer_data);
if(hold_token->type==KW_ELIPSIS)
{
directive->define->is_variadic=1;
delete_token(hold_token);
hold_token=preprocessing_extract_next_token(lexer_data);
if(hold_token->type==KW_OPEN_NORMAL)
{
delete_token(hold_token);
break;
}else
{
push_generic_error(lexer_data->program,"Expected ')' after '...' in functionlike macro");
break;
}
}else if(hold_token->type==KW_ID)
{
#warning make it check for duplicate ids here
struct functionlike_define_directive_argument *hold_argument;
hold_argument=get_functionlike_define_directive_argument(directive);
Queue_Push(directive->define->arguments,hold_argument);
((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive=directive->define;
((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive_argument=hold_argument;
delete_token(hold_token);
}else if(token_is_keyword(hold_token))
{
#warning make it check for duplicate ids here
Queue_Push(directive->define->arguments,
get_functionlike_define_directive_argument(directive));
((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive=directive->define;
delete_token(hold_token);
}else if(hold_token->type==KW_CLOSE_NORMAL)
{
delete_token(hold_token);
break;
}
}
}
/*these two are quite the pair of hacks*/
struct token* preprocessing_return_else_token(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
/*Returns a singleton #else marker token (see the 'pair of hacks' note above).
 *NOTE(review): every call aliases the same static object and overwrites its
 *delta — callers must not hold on to it across calls, and it must never be
 *passed to delete_token; confirm both assumptions hold*/
static struct token error={.type=PKW_ELSE};
error.delta=get_source_location_delta(lexer_data->previous_token_location,where);
return (struct token*)&error;
}
struct token* preprocessing_return_endif_token(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
/*Returns a singleton #endif marker token.
 *NOTE(review): same static-object aliasing caveat as the #else token above —
 *not reentrant and must not be freed; confirm callers respect this*/
static struct token endif={.type=PKW_ENDIF};
endif.delta=get_source_location_delta(lexer_data->previous_token_location,where);
return (struct token*)&endif;
}
#endif