WONKY





F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c --- a/src/frontend/lex/lex_preprocessing_directive.c +++ b/src/frontend/lex/lex_preprocessing_directive.c
return (struct token*)ret;
}
- struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token)
{
- #warning this is not yet done!
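+ /* Build a ## (token-pasting) node: 'left' is the token the caller has already lexed, 'right' is lexed below from the macro's substitution list. */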
+ struct token_hashtag_hastag_operator *ret;
+ struct token *hold_token;
+
+ ret=wonky_malloc(sizeof(struct token_hashtag_hastag_operator));
+ ret->type=KW_HASHTAG_HASHTAG;
+ ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+ ret->left=previous_token;
+
+
+ hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
+
+ if(hold_token==NULL)
+ {
+ push_lexing_error("Expected a token after the ## operator",lexer_data);
+ ret->right=NULL;
+ return (struct token*)ret;
+ }else
+ {
+ ret->right=hold_token;
+ return (struct token*)ret;
+ }
+ wonky_assert(SHOULD_NOT_REACH_HERE);
}
struct token* preprocessing_extract_next_directive(struct Lexer_Data *lexer_data)
{
Queue_Init(ret->tokens);
- #warning a hashtag include "asdfadf" [ space ] makes wonky get corrupted...
+ #warning a hashtag include "asdfadf" [ space ] makes wonky get corrupted ... ...
while(!preprocessing_eol(lexer_data))
{
}
void preprocessing_push_functionlike_macro_substitution_tokens(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
+ struct token *hold_token;
+ struct token *hold_next_token;
+
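+ /* Lex the replacement list one token at a time, folding each ## operator together with the token that follows it before pushing onto the directive's replacement token queue. */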
while(!preprocessing_eol(lexer_data))
- Queue_Push(directive->replacement_tokens,preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive));
+ {
+ hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
+
+ hold_next_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
+ while(hold_next_token!=NULL)
+ {
+ if(hold_next_token->type==KW_HASHTAG_HASHTAG)
+ {
+ hold_token=preprocessing_lex_hastag_hashtag_operator(lexer_data,where,directive,hold_token);
+ delete_token(hold_next_token);
+ }else
+ {
+ Queue_Push(directive->replacement_tokens,hold_token);
+ hold_token=hold_next_token;
+ }
+ hold_next_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
+ }
+
+
+
+ Queue_Push(directive->replacement_tokens,hold_token);
+ }
}
void preprocessing_parse_functionlike_macro_id_list(struct Lexer_Data *lexer_data,struct token_functionlike_define_directive *directive)
{
F diff --git a/src/frontend/lex/lex_preprocessing_directive.h b/src/frontend/lex/lex_preprocessing_directive.h --- a/src/frontend/lex/lex_preprocessing_directive.h +++ b/src/frontend/lex/lex_preprocessing_directive.h
struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive);
- struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
+ struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive,struct token *previous_token);
struct token* preprocessing_return_else_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_return_endif_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c --- a/src/semantics/program/translation_unit.c +++ b/src/semantics/program/translation_unit.c
{
hold_token=(struct token*)it->data;
wonky_assert(is_valid_token(hold_token));
- if(hold_token->type==KW_ID)
- {
- hold_str=((struct token_identifier*)hold_token)->id->data;
- hold_str_size=((struct token_identifier*)hold_token)->id->size;
- }else if(hold_token->type==KW_CONSTANT)
- {
- hold_str="constant";
- hold_str_size=sizeof("constant")-1;
- }else
- {
- /*hack*/
- hold_str=(char*)lexer_type_to_string_map[hold_token->type];
- if(hold_str==NULL) hold_str="";
- hold_str_size=gstrnlen(hold_str,100);
- }
+ hold_str=get_string_from_token(hold_token,&hold_str_size);
hold_string_token=(struct token_string*)
get_token_from_two_strings_with_a_space_between(
hold_string_token,
Queue_Push(ptr->context->ungeted_tokens,hold_string_token);
}
+ void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_unary_operator *op)
+ {
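+ /* Stub: intended to paste together the operand tokens of a ## operator during functionlike macro expansion. */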
+ #warning make this work in normal macros, not only in functionlike ones!
+
+ }
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
int open_bracket_count=1;
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h --- a/src/semantics/program/translation_unit.h +++ b/src/semantics/program/translation_unit.h
void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op);
-
- //void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,
+ void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,struct token_hashtag_hastag_unary_operator *op);
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c --- a/src/syntax/token/token.c +++ b/src/syntax/token/token.c
return (struct token*)get_string_token(KW_STRING,current_location,current_location,"Time could not be determined",sizeof("Time could not be determined"));
}
- char* get_string_from_token(struct token* token)
+ char* get_string_from_token(struct token* token,size_t *size)
{
- return "asdfasdf";
+ char *ret;
+
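+ /* Return a printable string for the token and report its length through *size. */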
+ if(token->type==KW_ID)
+ {
+ ret=((struct token_identifier*)token)->id->data;
+ *size=((struct token_identifier*)token)->id->size;
+ }else if(token_is_keyword(token))
+ {
+ ret=((struct token_keyword*)token)->id->data;
+ *size=((struct token_keyword*)token)->id->size;
+ }else if(token->type==KW_CONSTANT)
+ {
+ ret=="constant";
+ *size=sizeof("constant")-1;
+ }else
+ {
+ /*hack*/
+ ret=(char*)lexer_type_to_string_map[token->type];
+ if(ret==NULL) ret="";
+ *size=gstrnlen(ret,100);
+ }
+ return ret;
}
void delete_token(struct token* token)
{
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h --- a/src/syntax/token/token.h +++ b/src/syntax/token/token.h
{
enum LEXER_TYPE type;
struct Source_Location_Delta *delta;
- struct Queue *operands;
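+ /* left and right operands of the ## token-pasting operator */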
+ struct token *left;
+ struct token *right;
};
struct token_error
struct token* get_stdc_hosted_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
struct token* get_stdc_version_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
struct token* get_time_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
- char* get_string_from_token(struct token* token);
+ char* get_string_from_token(struct token* token,size_t *size);
_Bool token_is_keyword(struct token *token);
_Bool token_is_identifier_in_preprocessing(struct token *token);