WONKY

diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c
--- a/src/frontend/lex/lex_preprocessing_directive.c
+++ b/src/frontend/lex/lex_preprocessing_directive.c
}
return (struct token*)ret;
}
- struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
struct token_hashtag_unary_operator *ret;
struct token *hold_token;
ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
if(!preprocessing_eol(lexer_data))
- hold_token=preprocessing_extract_next_token(lexer_data);
+ hold_token=preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive);
if(hold_token==NULL || hold_token->type!=PKW_MACRO_ARGUMENT)
{
push_lexing_error("Expected macro argument after #",lexer_data);
while(!preprocessing_eol(lexer_data))
Queue_Push(queue,preprocessing_extract_next_token(lexer_data));
}
- void preprocessing_push_functionlike_macro_substitution_tokens(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
+ struct token* preprocessing_get_token_for_functionlike_macro_substitution_list(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
{
+
struct token *hold_token;
- while(!preprocessing_eol(lexer_data))
- {
- hold_token=preprocessing_extract_next_token(lexer_data);
- wonky_assert(hold_token!=NULL);
+ hold_token=preprocessing_extract_next_token(lexer_data);
- if(hold_token->type==KW_ID && ((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive==directive)
- {
- #warning this warning was placed when making the # and ## operations
+ wonky_assert(hold_token!=NULL);
+ if(hold_token->type==KW_ID && ((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive==directive)
+ {
+ return get_functionlike_define_directive_argument_token(
+ where,
+ lexer_data->previous_token_location,
+ ((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive_argument);
+ }else if(token_is_keyword(hold_token) && ((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive==directive)
+ {
+ return get_functionlike_define_directive_argument_token(
+ where,
+ lexer_data->previous_token_location,
+ ((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive_argument);
+ }else if(hold_token->type==KW_HASHTAG)
+ {
+ return preprocessing_lex_hastag_unary_operator(lexer_data,where,directive);
+ }else
+ return hold_token;
- Queue_Push(directive->replacement_tokens,
- get_functionlike_define_directive_argument_token(
- where,
- lexer_data->previous_token_location,
- ((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive_argument));
- delete_token(hold_token);
- }else if(token_is_keyword(hold_token) && ((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive==directive)
- {
- Queue_Push(directive->replacement_tokens,
- get_functionlike_define_directive_argument_token(
- where,
- lexer_data->previous_token_location,
- ((struct token_keyword*)hold_token)->id->hold_functionlike_define_directive_argument));
- delete_token(hold_token);
- }else if(hold_token->type==KW_HASHTAG)
- {
- Queue_Push(directive->replacement_tokens,preprocessing_lex_hastag_unary_operator(lexer_data,where));
- }else
- {
- Queue_Push(directive->replacement_tokens,hold_token);
- }
- }
+
+ }
+ void preprocessing_push_functionlike_macro_substitution_tokens(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive)
+ {
+ while(!preprocessing_eol(lexer_data))
+ Queue_Push(directive->replacement_tokens,preprocessing_get_token_for_functionlike_macro_substitution_list(lexer_data,where,directive));
}
void preprocessing_parse_functionlike_macro_id_list(struct Lexer_Data *lexer_data,struct token_functionlike_define_directive *directive)
{
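Note: the replacement list of a function-like #define is now collected one token at a time; preprocessing_get_token_for_functionlike_macro_substitution_list turns parameter names into PKW_MACRO_ARGUMENT tokens and, on a `#`, calls preprocessing_lex_hastag_unary_operator to wrap the following argument in a PKW_HASHTAG_UNARY_OP token. A minimal example of the stringization this prepares for (illustrative only, not taken from the repository):

    #include <stdio.h>

    /* The # operator in a function-like macro turns its argument's
       token sequence into a string literal (C11 6.10.3.2). */
    #define WARN(cond) puts("check failed: " #cond)

    int main(void)
    {
        WARN(x > 0);   /* expands to: puts("check failed: " "x > 0"); */
        return 0;
    }
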
diff --git a/src/frontend/lex/lex_preprocessing_directive.h b/src/frontend/lex/lex_preprocessing_directive.h
--- a/src/frontend/lex/lex_preprocessing_directive.h
+++ b/src/frontend/lex/lex_preprocessing_directive.h
struct token* preprocessing_lex_pragma_directive(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
- struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
+ struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive);
struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_return_else_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
void preprocessing_push_tokens_into_queue_until_eol(struct Lexer_Data *lexer_data,struct Queue *queue);
+ struct token* preprocessing_get_token_for_functionlike_macro_substitution_list(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive);
void preprocessing_push_functionlike_macro_substitution_tokens(struct Lexer_Data *lexer_data,struct Source_Location *where,struct functionlike_define_directive *directive);
-
void preprocessing_goto_end_of_line(struct Lexer_Data *lexer_data);
struct token* preprocessing_extract_next_token(struct Lexer_Data *lexer_data);
diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c
--- a/src/frontend/lex/lexer.c
+++ b/src/frontend/lex/lexer.c
case KW_HASHTAG:
if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
{
- return preprocessing_lex_hastag_unary_operator(lexer_data,token_location);
+ return get_small_token(KW_HASHTAG,token_location,lexer_data->previous_token_location);
}else
{
if(!lexer_data->is_in_the_begining_of_line)
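Note: a `#` seen while a preprocessing directive is being lexed is now returned as a plain KW_HASHTAG token; whether it is the stringize operator is decided later, when the replacement list of a function-like macro is read. A short illustration of why the decision has to wait (example mine, not from the source):

    /* The meaning of '#' inside a #define depends on the kind of macro,
       so it cannot be classified at the point the '#' itself is lexed. */
    #define HASH   #       /* object-like macro: '#' is an ordinary punctuator   */
    #define STR(x) #x      /* function-like macro: '#' is the stringize operator */
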
diff --git a/src/semantics/ast.c b/src/semantics/ast.c
--- a/src/semantics/ast.c
+++ b/src/semantics/ast.c
hold_function=get_enclosing_function(scope);
return_type=((struct Type_Function*)hold_function->type)->return_type;
+ #warning does not seem to work with nop "return;"
if(constraint_check_return_statement(return_expression,return_type,translation_data))
{
ret=wonky_malloc(sizeof(struct AST_If_Statement));
diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c
--- a/src/semantics/program/translation_unit.c
+++ b/src/semantics/program/translation_unit.c
char *hold_str;
size_t hold_str_size;
+ token_ptr_goto_next_token(ptr);
+
+
/*This wastes a LOT OF MEMORY. TODO make a temp token allocation scheme*/
hold_string_token=(struct token_string*)get_string_token(KW_STRING,op->delta->location,op->delta->location,"",0);
{
hold_str=((struct token_identifier*)hold_token)->id->data;
hold_str_size=((struct token_identifier*)hold_token)->id->size;
+ }else if(hold_token->type==KW_CONSTANT)
+ {
+ hold_str="constant";
+ hold_str_size=sizeof("constant")-1;
}else
{
/*hack*/
hold_str=(char*)lexer_type_to_string_map[hold_token->type];
+ if(hold_str==NULL) hold_str="";
hold_str_size=gstrnlen(hold_str,100);
}
hold_string_token=(struct token_string*)
(struct token_string*)get_string_token(KW_STRING,hold_token->delta->location,hold_token->delta->location,hold_str,hold_str_size)
);
}
+ Queue_Push(ptr->context->ungeted_tokens,hold_string_token);
}
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
case PKW_MACRO_ARGUMENT:
token_ptr_execute_functionlike_macro_argument(token_pointer,((struct token_functionlike_define_argument*)token)->argument);
return 1;
+ case PKW_HASHTAG_UNARY_OP:
+ token_ptr_execute_stringify_functionlike_macro_argument(token_pointer,((struct token_hashtag_unary_operator*)token));
+ return 1;
default:
return 0;
}
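Note: the new PKW_HASHTAG_UNARY_OP case executes stringization during macro expansion; identifier arguments use their own spelling, KW_CONSTANT tokens currently fall back to the placeholder string "constant", and everything else is spelled through lexer_type_to_string_map. For reference, a small program showing what the operator produces with a fully conforming preprocessor (illustrative only):

    #include <stdio.h>

    #define STR(x) #x

    int main(void)
    {
        /* A conforming preprocessor spells the argument's tokens with
           single spaces where whitespace separated them, printing: a + 1 */
        puts(STR(a + 1));
        return 0;
    }
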
diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c
--- a/src/syntax/token/token.c
+++ b/src/syntax/token/token.c
#define WONKY_TOKEN_C WONKY_TOKEN_C
#include <token.h>
+ struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location)
+ {
+ struct token_identifier *ret;
+
+ ret=wonky_malloc(sizeof(struct token_identifier));
+ ret->type=type;
+ ret->delta=get_source_location_delta(previous_location,current_location);
+
+ return (struct token*)ret;
+ }
struct token* get_id_token(struct identifier *id,struct Source_Location *current_location,struct Source_Location *previous_location)
{
struct token_identifier *ret;
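Note: get_small_token fills in only the shared type and delta fields yet allocates a whole struct token_identifier. If struct token itself declares those shared members, as the casts elsewhere in the file suggest, the allocation could be trimmed; a sketch under that assumption (not code from the repository):

    struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location)
    {
        /* Assumes struct token declares the shared 'type' and 'delta' members. */
        struct token *ret;

        ret=wonky_malloc(sizeof(struct token));
        ret->type=type;
        ret->delta=get_source_location_delta(previous_location,current_location);

        return ret;
    }
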
diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h
--- a/src/syntax/token/token.h
+++ b/src/syntax/token/token.h
void handle_splicing(struct token *word);
char compare_tokens(struct token *a,struct token *b);
+ struct token* get_small_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location);
+
struct token* get_id_token(struct identifier *id,struct Source_Location *current_location,struct Source_Location *previous_location);
struct token* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location,struct identifier *id);
struct token* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *current_location,struct Source_Location *previous_location);