WONKY



LOG | FILES | OVERVIEW


F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c --- a/src/frontend/lex/lex_preprocessing_directive.c +++ b/src/frontend/lex/lex_preprocessing_directive.c
return (struct token*)ret;
}
- struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token)
+ /*Lex the operand list of a '##' chain inside a functionlike macro body.
+ previous_token is the left operand; the first '##' has already been
+ consumed by the caller. Returns a token holding the whole operand queue,
+ or previous_token unchanged after pushing a lexing error.*/
+ struct token* preprocessing_lex_hastag_hashtag_operator_in_functionlike_macro(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token)
{
- struct token_hashtag_hastag_operator *ret;
struct token *hold_token;
+ struct Queue *ret_q;
- ret=wonky_malloc(sizeof(struct token_hashtag_hastag_operator));
- ret->type=KW_HASHTAG_HASHTAG;
- ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
- ret->left=previous_token;
-
-
- hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
-
- if(hold_token==NULL)
+ if(preprocessing_eol(lexer_data))
{
push_lexing_error("Expected something after ##",lexer_data);
- return (struct token*)ret;
+ return (struct token*)previous_token;
}else
{
- ret->right=hold_token;
- return (struct token*)ret;
+ /*the first '##' has been eaten by the caller; allocate the queue
+ only after the eol check so the error path does not leak it*/
+ ret_q=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(ret_q);
+ Queue_Push(ret_q,previous_token);
+ /*BUG FIX: the previous version pushed previous_token on every pass
+ (and once more after the loop) while discarding each fetched
+ hold_token, so the operand queue never held the real operands*/
+ do{
+ hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
+ Queue_Push(ret_q,hold_token);
+ }while(preprocessing_get_and_check_token(lexer_data,KW_HASHTAG_HASHTAG) && !preprocessing_eol(lexer_data));
+ /*NOTE(review): a trailing '##' consumed right before eol is not
+ diagnosed here -- TODO confirm the caller reports it*/
+ return (struct token*)get_hashtag_hashtag_operator(where,lexer_data->previous_token_location,ret_q);
}
- wonky_assert(SHOULD_NOT_REACH_HERE);
}
struct token* preprocessing_extract_next_directive(struct Lexer_Data *lexer_data)
{
struct Automata_Node *hold_node;
size_t where_does_the_token_start_in_the_source_file;
+ if(lexer_data->buffer_token!=NULL)
+ {
+ ret=lexer_data->buffer_token;
+ lexer_data->buffer_token=NULL;
+ return ret;
+ }
+
do{
lexer_data->is_in_the_begining_of_line=0;
return ret;
}
+ /*Peek at the next preprocessing token; when its type matches token_type
+ consume it and return 1, otherwise leave it buffered and return 0.*/
+ _Bool preprocessing_get_and_check_token(struct Lexer_Data *lexer_data,enum LEXER_TYPE token_type)
+ {
+ struct token *peeked;
+
+ if(lexer_data->buffer_token==NULL)
+ lexer_data->buffer_token=preprocessing_extract_next_token(lexer_data);
+
+ peeked=lexer_data->buffer_token;
+ if(peeked==NULL || peeked->type!=token_type)
+ return 0;
+
+ /*matched -- hand the buffered token over by dropping our reference*/
+ lexer_data->buffer_token=NULL;
+ return 1;
+ }
struct Automata_Node* preprocessing_feed_automata_until_error(struct Lexer_Data *lexer_data)
{
struct Automata_Node *head;
lexer_data->src->src_name
);
- switch(finishing_node->preprocessing_keyword)
- {
- case PKW_IF:
- return preprocessing_lex_if_directive(lexer_data,token_location);
- case PKW_IFDEF:
- return preprocessing_lex_ifdef_directive(lexer_data,token_location);
- case PKW_IFNDEF:
- return preprocessing_lex_ifndef_directive(lexer_data,token_location);
- case PKW_ELIF:
- return preprocessing_lex_if_directive(lexer_data,token_location);
- case PKW_ELSE:
- return preprocessing_return_else_token(lexer_data,token_location);
- case PKW_ENDIF:
- return preprocessing_return_endif_token(lexer_data,token_location);
- case PKW_INCLUDE:
- return preprocessing_lex_include_directive(lexer_data,token_location);
- case PKW_DEFINE:
- return preprocessing_lex_define_directive(lexer_data,token_location);
- case PKW_UNDEF:
- return preprocessing_lex_undef_directive(lexer_data,token_location);
- case PKW_LINE:
- return preprocessing_lex_line_directive(lexer_data,token_location);
- case PKW_ERROR:
- return preprocessing_lex_error_directive(lexer_data,token_location);
- case PKW_PRAGMA:
- return get_error_token("PREPROCESSING PRAGMA NOT DONE",token_location,lexer_data->previous_token_location,lexer_data->program);
- case PKW_DEFINED:
- return preprocessing_lex_defined_unary_operator(lexer_data,token_location);
- default:
- return lexer_make_token_finishing_on_node(lexer_data,finishing_node,start_position);
+ if(create_directive)
+ {
+ switch(finishing_node->preprocessing_keyword)
+ {
+ case PKW_IF:
+ return preprocessing_lex_if_directive(lexer_data,token_location);
+ case PKW_IFDEF:
+ return preprocessing_lex_ifdef_directive(lexer_data,token_location);
+ case PKW_IFNDEF:
+ return preprocessing_lex_ifndef_directive(lexer_data,token_location);
+ case PKW_ELIF:
+ return preprocessing_lex_if_directive(lexer_data,token_location);
+ case PKW_ELSE:
+ return preprocessing_return_else_token(lexer_data,token_location);
+ case PKW_ENDIF:
+ return preprocessing_return_endif_token(lexer_data,token_location);
+ case PKW_INCLUDE:
+ return preprocessing_lex_include_directive(lexer_data,token_location);
+ case PKW_DEFINE:
+ return preprocessing_lex_define_directive(lexer_data,token_location);
+ case PKW_UNDEF:
+ return preprocessing_lex_undef_directive(lexer_data,token_location);
+ case PKW_LINE:
+ return preprocessing_lex_line_directive(lexer_data,token_location);
+ case PKW_ERROR:
+ return preprocessing_lex_error_directive(lexer_data,token_location);
+ case PKW_PRAGMA:
+ return get_error_token("PREPROCESSING PRAGMA NOT DONE",token_location,lexer_data->previous_token_location,lexer_data->program);
+ case PKW_DEFINED:
+ return preprocessing_lex_defined_unary_operator(lexer_data,token_location);
+ default:
+ return lexer_make_token_finishing_on_node(lexer_data,finishing_node,start_position);
+ }
+ wonky_assert(SHOULD_NOT_REACH_HERE);
+ }else
+ {
+ return lexer_make_token_finishing_on_node(lexer_data,finishing_node,start_position);
}
- wonky_assert(SHOULD_NOT_REACH_HERE);
}
struct token* preprocessing_lex_line_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
hold_hold_token=preprocessing_extract_next_token(lexer_data);
if(hold_hold_token==NULL)
{
- ret=(struct token_normal_define_directive*)get_normal_define_directive_token(where,lexer_data->previous_token_location,hold_id);
+ ret=get_normal_define_directive_token(where,lexer_data->previous_token_location,hold_id);
delete_token((struct token*)hold_token);
return ret;
}else if(hold_hold_token->type==KW_OPEN_NORMAL)
while(!preprocessing_eol(lexer_data))
Queue_Push(queue,preprocessing_extract_next_token(lexer_data));
}
+ /*You must not call this when eol*/
+ /*Fetch one token of a functionlike macro's substitution list. A plain
+ token is handed back as-is; the id of the macro currently being defined
+ is routed through preprocessing_lex_hastag_unary_operator instead.
+ NOTE(review): the fetched id token itself is dropped without delete_token
+ before that call -- possible leak, TODO confirm.*/
struct token* preprocessing_get_token_for_functionlike_macro_substitution_list(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
struct token *hold_token;
+ /*returns NULL on eol*/
hold_token=preprocessing_extract_next_token(lexer_data);
+ /*no eol tokens*/
wonky_assert(hold_token!=NULL);
if(hold_token->type==KW_ID && ((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive==directive)
{
return preprocessing_lex_hastag_unary_operator(lexer_data,where,directive);
}else
+ {
return hold_token;
+ }
}
+ /*Collect the replacement tokens of a functionlike macro up to end of
+ line, folding 'a ## b ## ...' runs into a single hashtag-hashtag
+ operator token before queueing them on directive->replacement_tokens.*/
void preprocessing_push_functionlike_macro_substitution_tokens(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
struct token *hold_token;
- struct token *hold_next_token;
while(!preprocessing_eol(lexer_data))
{
hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
-
- hold_next_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
- while(hold_next_token!=NULL);
+ if(preprocessing_get_and_check_token(lexer_data,KW_HASHTAG_HASHTAG))
{
- if(hold_next_token->type==KW_HASHTAG_HASHTAG)
+ if(preprocessing_eol(lexer_data))
+ {
+ /*'##' may not be the last token of a replacement list (C11 6.10.3.3)*/
+ push_lexing_error("## is the last token in the functionlike macro substitution tokens",lexer_data);
+ return;
+ }else
{
- hold_token=preprocessing_lex_hastag_hashtag_operator(lexer_data,where,directive,hold_token);
- delete_token(hold_next_token);
+ hold_token=preprocessing_lex_hastag_hashtag_operator_in_functionlike_macro(lexer_data,where,directive,hold_token);
}
- hold_next_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
}
-
-
-
- Queue_Push(directive->replacement_tokens,hold_token);
+ Queue_Push(directive->replacement_tokens,hold_token);
}
}
void preprocessing_parse_functionlike_macro_id_list(struct Lexer_Data *lexer_data,struct token_functionlike_define_directive *directive)
F diff --git a/src/frontend/lex/lex_preprocessing_directive.h b/src/frontend/lex/lex_preprocessing_directive.h --- a/src/frontend/lex/lex_preprocessing_directive.h +++ b/src/frontend/lex/lex_preprocessing_directive.h
struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive);
- struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token);
+ struct token* preprocessing_lex_hastag_hashtag_operator_in_functionlike_macro(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token);
struct token* preprocessing_return_else_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_return_endif_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_extract_next_token(struct Lexer_Data *lexer_data);
struct token* preprocessing_extract_next_directive(struct Lexer_Data *lexer_data);
struct token* preprocessing_extract_next_token_inner(struct Lexer_Data *lexer_data,_Bool extract_directive);
+ _Bool preprocessing_get_and_check_token(struct Lexer_Data *lexer_data,enum LEXER_TYPE token_type);
struct Automata_Node* preprocessing_feed_automata_until_error(struct Lexer_Data *lexer_data);
struct Automata_Node* preprocessing_feed_automata_next_char(struct Lexer_Data *lexer_data,struct Automata_Node *node);
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c --- a/src/frontend/lex/lexer.c +++ b/src/frontend/lex/lexer.c
ret->src=src;
ret->program=program;
ret->previous_token_location=&start_of_file;
+ ret->buffer_token=NULL;
return ret;
struct Automata_Node *hold_node;
size_t where_does_the_token_start_in_the_source_file;
+ if(lexer_data->buffer_token!=NULL)
+ {
+ ret=lexer_data->buffer_token;
+ lexer_data->buffer_token=NULL;
+ return ret;
+ }
+
do{
switch(finishing_node->keyword)
{
+ case KW_HASHTAG_HASHTAG:
+ if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
+ {
+ return get_small_token(KW_HASHTAG_HASHTAG,token_location,lexer_data->previous_token_location);
+ }else
+ {
+ return get_error_token("Ran into ## while not in a preprocessing directive. Invalid syntax.",token_location,lexer_data->previous_token_location,lexer_data->program);
+ }
+ break;
case KW_HASHTAG:
if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
{
return preprocessing_lex_directive(lexer_data,where);
}
+ /*Peek at the next (non-preprocessing) token; when its type matches
+ token_type consume it and return 1, otherwise leave it buffered and
+ return 0. Mirrors preprocessing_get_and_check_token.
+ NOTE(review): the where parameter is unused -- TODO confirm it can go.*/
+ _Bool lex_get_and_check(struct Lexer_Data *lexer_data,struct Source_Location *where,enum LEXER_TYPE token_type)
+ {
+ if(lexer_data->buffer_token==NULL)
+ lexer_data->buffer_token=lexer_extract_next_token(lexer_data);
+
+ if(lexer_data->buffer_token && lexer_data->buffer_token->type==token_type)
+ {
+ lexer_data->buffer_token=NULL;
+ return 1;
+ }else
+ {
+ return 0;
+ }
+ }
void delete_lexer_data(struct Lexer_Data *lexer_data)
{
wonky_free(lexer_data);
F diff --git a/src/frontend/lex/lexer.h b/src/frontend/lex/lexer.h --- a/src/frontend/lex/lexer.h +++ b/src/frontend/lex/lexer.h
struct Source_Location *previous_token_location;
+ struct token *buffer_token;
+
};
struct token *lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
-
struct token *lex_preprocessing_directive(struct Lexer_Data *lexer_data,struct Source_Location *where);
+ _Bool lex_get_and_check(struct Lexer_Data *lexer_data,struct Source_Location *where,enum LEXER_TYPE token_type);
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c --- a/src/semantics/program/translation_unit.c +++ b/src/semantics/program/translation_unit.c
ret->program=program;
ret->macro_expansion_number=1;
+ ret->next_barrier_number=1;
return ret;
}
struct token *hold_token;
struct Preprocessing_Translation_Unit *hold_unit;
char *include_name;
+ unsigned int barrier;
wonky_assert(include_directive->tokens->first);
token_ptr_jump_to_first(ptr,include_directive->tokens);
+ barrier=token_ptr_set_barrier(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
-
hold_token=token_ptr_get_token_under_pointer(ptr);
-
ptr->state=TOKEN_POINTER_STATE_NORMAL;
+ wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
+ token_ptr_clear_barrier(ptr);
if(hold_token->type==KW_STRING)
{
token_ptr_goto_next_token(ptr);
}else if(hold_token->type==KW_LESS)
{
+ barrier=token_ptr_set_barrier(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
include_name=gstrncpy("",2);
hold_token=token_ptr_get_token_under_pointer(ptr);
hold_token=token_ptr_get_token_under_pointer(ptr);
}
ptr->state=TOKEN_POINTER_STATE_NORMAL;
+ wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
+ token_ptr_clear_barrier(ptr);
if(ptr->context->current_token_node!=NULL)
{
token_ptr_jump_to_first(ptr,hold_unit->tokens);
else
token_ptr_goto_next_token(ptr);
- }else if(hold_token->type==KW_LESS_EQ)
+ }else if(hold_token->type==KW_LESS_EQ) /*implementation defined*/
{
push_token_ptr_error("'=' is not supported inside filename in include directive with angular brackets and macro expansion",ptr);
return;
struct AST *control;
struct Translation_Data *dummy_data;
struct Scope *dummy_scope;
+ unsigned int barrier;
token_ptr_goto_next_token(ptr);
ptr->state=TOKEN_POINTER_STATE_PREPROCESSING;
ptr->is_in_conditional_directive=1;
+ barrier=token_ptr_set_barrier(ptr);
+
control=parse_expression(dummy_data,dummy_scope);
- ptr->state=TOKEN_POINTER_STATE_NORMAL;
+
+ wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
+ token_ptr_clear_barrier(ptr);
ptr->is_in_conditional_directive=0;
+ ptr->state=TOKEN_POINTER_STATE_NORMAL;
if(evaluate_const_expression_integer(control,dummy_data)!=0)
{
Queue_Push(ptr->context->ungeted_tokens,hold_string_token);
}
- void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_unary_operator *op)
+ /*Execute a '##' (token paste) operator token during macro expansion.
+ NOTE(review): op is never read and no concatenation is performed --
+ the body only sets a barrier, advances one token and clears the barrier.
+ Looks like a stub; TODO confirm the actual pasting is still unwritten.*/
+ void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_operator *op)
{
- #warning make this work in normal macros, not only in functionlike ones!
+ unsigned int barrier;
+
+ barrier=token_ptr_set_barrier(ptr);
+ token_ptr_goto_next_token(ptr);
+
+
+
+ wonky_assert(barrier==token_ptr_get_current_barrier_number(ptr));
+ token_ptr_clear_barrier(ptr);
}
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
case PKW_HASHTAG_UNARY_OP:
token_ptr_execute_stringify_functionlike_macro_argument(token_pointer,((struct token_hashtag_unary_operator*)token));
return 1;
+ case PKW_HASHTAG_HASHTAG_OP:
+ token_ptr_execute_concat_functionlike_macro_arguments(token_pointer,((struct token_hashtag_hastag_operator*)token));
+ return 1;
default:
return 0;
}
ret->column=hold_location->column;
ret->filename=hold_location->src_name->name;
ret->filename_size=hold_location->src_name->name_size;
+ ret->barrier_number=0;
Queue_Init(ret->ungeted_tokens);
{
if(ptr->state==TOKEN_POINTER_STATE_ERROR)
return 0;
-
if(token_ptr_has_buffered_tokens(ptr))
{
return 1;
{
while(ptr->context->number_of_remaining_tokens==0 &&
- //ptr->state!=TOKEN_POINTER_STATE_PREPROCESSING &&
+ ptr->context->barrier_number==0 &&
ptr->call_stack->size>0)
{
delete_token_ptr_context(ptr->context);
else
return NULL;
}
+ /*Return the barrier number of the current context (0 = no barrier).*/
+ unsigned int token_ptr_get_current_barrier_number(struct Token_Pointer *ptr)
+ {
+ return ptr->context->barrier_number;
+ }
+ /*Place a fresh barrier on the current context and return its number.
+ A barriered context is never popped by the exhausted-context loop,
+ which only discards contexts whose barrier_number is 0.*/
+ unsigned int token_ptr_set_barrier(struct Token_Pointer *ptr)
+ {
+ return ptr->context->barrier_number=ptr->next_barrier_number++;
+ }
+ /*Remove the barrier from the current context.*/
+ void token_ptr_clear_barrier(struct Token_Pointer *ptr)
+ {
+ ptr->context->barrier_number=0;
+ }
void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens)
{
struct Token_Pointer_Context *new_context;
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h --- a/src/semantics/program/translation_unit.h +++ b/src/semantics/program/translation_unit.h
size_t column;
char *filename;
size_t filename_size;
+ unsigned int barrier_number;/*0 means no barrier*/
};
struct Token_Pointer
{
_Bool is_in_conditional_directive;/*TODO move this into the state*/
size_t macro_expansion_number;
+ unsigned int next_barrier_number;
};
struct Preprocessing_Translation_Unit* get_preprocessing_translation_unit(struct Source_File *source);
void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op);
- void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_unary_operator *op);
+ void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_operator *op);
struct Queue_Node* token_ptr_get_current_queue_node(struct Token_Pointer *ptr);
+ /*0 means no barrier in current context*/
+ unsigned int token_ptr_get_current_barrier_number(struct Token_Pointer *ptr);
+ /*returns set barrier number*/
+ unsigned int token_ptr_set_barrier(struct Token_Pointer *ptr);
+ void token_ptr_clear_barrier(struct Token_Pointer *ptr);
+
void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to,size_t number_of_remaining_tokens);
void token_ptr_jump_to_first(struct Token_Pointer *ptr,struct Queue *queue);
void delete_token_ptr_context(struct Token_Pointer_Context *context);
F diff --git a/src/syntax/automatas/generator/generator.c b/src/syntax/automatas/generator/generator.c --- a/src/syntax/automatas/generator/generator.c +++ b/src/syntax/automatas/generator/generator.c
fprintf(out,"const char *lexer_type_to_string_map[LEXER_TYPE_END]=\n\t{\n",number_of_chonky_keywords);
for(size_t i=0;i<number_of_chonky_keywords;++i)
{
- fprintf(out,"\t\t[%s]=\"%s\",\n",chonky_keywords[i].kw_string, chonky_keywords[i].keyword);
+ fprintf(out,"\t\t[%s]=\"%s\",\n",chonky_keywords[i].preprocessing_kw_string, chonky_keywords[i].keyword);
}
fprintf(out,"\n\t};");
}
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c --- a/src/syntax/source_file.c +++ b/src/syntax/source_file.c
return ret;
}
+ /*Make a throw-away Source_File named "scratch pad" with no text attached.
+ Callers presumably fill it via source_file_expand -- TODO confirm.*/
+ struct Source_File* get_temp_source_file()
+ {
+ struct Source_File *ret;
+ ret=wonky_malloc(sizeof(struct Source_File));
+ ret->type=SOURCE_TEXT_FULL_TEXT;
+ ret->src_name=get_source_name("scratch pad");
+ ret->src=NULL;
+ ret->src_size=0;
+ ret->canonic_name_size=0;
+
+ return ret;
+ }
struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,struct Source_Name *src_name)
{
return ret;
}
+ /*Grow src by expand_byte_count bytes, zero-filling the newly added tail.
+ Allocates the buffer on first use (src->src==NULL).*/
+ void source_file_expand(struct Source_File *src,size_t expand_byte_count)
+ {
+ if(src->src)
+ {
+ char *hold_new_src;
+ size_t i;
+ hold_new_src=wonky_malloc(src->src_size+expand_byte_count);
+ gmemmove(hold_new_src,src->src,src->src_size);
+ /*zero the new tail before publishing the buffer*/
+ for(i=0;i<expand_byte_count;++i)
+ hold_new_src[src->src_size+i]='\0';
+ wonky_free(src->src);
+ /*BUG FIX: the previous version freed src->src and then kept writing
+ through it, and never stored hold_new_src back into the struct --
+ a use-after-free plus a leak of the new buffer*/
+ src->src=hold_new_src;
+ src->src_size+=expand_byte_count;
+ }else
+ {
+ src->src=wonky_calloc(expand_byte_count,1);
+ src->src_size=expand_byte_count;
+ }
+ }
void delete_source_file(struct Source_File *src)
{
wonky_free(src);
F diff --git a/src/syntax/source_file.h b/src/syntax/source_file.h --- a/src/syntax/source_file.h +++ b/src/syntax/source_file.h
};
struct Source_File
{
- struct Source_Name *src_name;
enum Source_Text_Type type;
+ struct Source_Name *src_name;
char *src;
size_t src_size;
size_t canonic_name_size;
struct Source_File* get_source_file_from_string(char *filename,size_t filename_size,struct Program *program);
+ struct Source_File* get_temp_source_file();
+ struct Source_File* get_source_file_from_tokens(struct Token_Pointer *ptr);
struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,struct Source_Name *src_name);
struct Source_Location_Delta* get_source_location_delta(struct Source_Location *begining,struct Source_Location *ending);
- struct Source_File* get_source_file_from_tokens(struct Token_Pointer *ptr);
+ void source_file_expand(struct Source_File *src,size_t expand_byte_count);
struct Source_Name* get_source_name(char *constructed_name);
void delete_source_file(struct Source_File *src);
+
+
#endif
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c --- a/src/syntax/token/token.c +++ b/src/syntax/token/token.c
ret->type=PKW_INCLUDE;
return (struct token*)ret;
}
+ /*Build a PKW_HASHTAG_HASHTAG_OP token from a queue of operand tokens.
+ The operands queue is stored as-is, not copied, so the token takes it over.*/
+ struct token* get_hashtag_hashtag_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *operands)
+ {
+ struct token_hashtag_hastag_operator *ret;
+ ret=wonky_malloc(sizeof(struct token_hashtag_hastag_operator));
+ ret->type=PKW_HASHTAG_HASHTAG_OP;
+ ret->delta=get_source_location_delta(previous_location,current_location);
+ ret->operands=operands;
+
+ return (struct token*)ret;
+ }
struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Source_Location *previous_location,struct Program *program,...)
{
struct token_error *ret;
*size=sizeof("constant")-1;
}else
{
- /*hack*/
- ret=(char*)lexer_type_to_string_map[token->type];
+ /*hack.*/
+ ret=(char*)lexer_type_to_string_map[token->type];
if(ret==NULL) ret="";
*size=gstrnlen(ret,100);
}
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h --- a/src/syntax/token/token.h +++ b/src/syntax/token/token.h
{
enum LEXER_TYPE type;
struct Source_Location_Delta *delta;
- struct token *left;
- struct token *right;
+ struct Queue *operands; /*queue of id and/or functionlike macro argument tokens*/
};
struct token_error
struct token* get_pragma_directive(struct Source_Location *current_location,struct Source_Location *previous_location,enum Pragma_Type type);
struct token* get_defined_unary_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
struct token* get_hashtag_unary_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct token_functionlike_define_directive *operand);
- struct token* get_hashtag_hashtag_unary_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *operands);
+ struct token* get_hashtag_hashtag_operator(struct Source_Location *current_location,struct Source_Location *previous_location,struct Queue *operands);
struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Source_Location *previous_location,struct Program *program,...);
struct token* get_eof_token();
struct token* get_token_from_two_adjacent_strings(struct token_string *first,struct token_string *second);
F diff --git a/src/syntax/token/token.hh b/src/syntax/token/token.hh --- a/src/syntax/token/token.hh +++ b/src/syntax/token/token.hh
struct token_pragma_directive;
struct token_defined_unary_operator;
struct token_hashtag_unary_operator;
- struct token_hashtag_hastag_unary_operator;
+ struct token_hashtag_hastag_operator;
struct token_error;
enum Punctuator_Token_Type