F diff --git a/lex/lexer.c b/lex/lexer.c
--- a/lex/lexer.c
+++ b/lex/lexer.c
{
- current_token=get_next_token(src,&chonky[0]);
+ current_token=get_next_token(src,&chonky[0],1);
if(current_token->type==KW_HASHTAG)
{
parse_preproc_line(src,translation_data);
}else if(current_token->type!=KW_NOTYPE)
{
- Queue_Push(translation_data->tokens,current_token);
+ //Queue_Push(translation_data->tokens,current_token);
+ expand_macro(current_token,src,translation_data);
}else
{
if(src->where_in_src!=src->src_size)
{
while(src->src[src->where_in_src]!='\n' && src->src[src->where_in_src]!='\0')
{
+ skip_line_splice(src);
if(src->src[src->where_in_src]!=' ' && src->src[src->where_in_src]!='\t')
{
- /*TODO make comments acceptable (spliced lines also)*/
push_lexing_error("expected a new line",src,translation_data);
break;
}else
src->which_column=0;
++src->which_row;
}
- void skip_white_space(struct Source_File *src)
+ /*returns the number of bytes skipped*/
+ /*consumes consecutive backslash-newline pairs ("line splices",
+   C translation phase 2) at the current position; the check against
+   src_size-1 keeps the two-byte lookahead in bounds*/
+ size_t skip_line_splice(struct Source_File *src)
{
- while(src->src[src->where_in_src]==' ' || src->src[src->where_in_src]=='\n' || src->src[src->where_in_src]=='\t')
+ size_t current_size=0;
+ while(src->where_in_src<src->src_size-1 && src->src[src->where_in_src]=='\\' && src->src[src->where_in_src+1]=='\n')
+ {
+ src->where_in_src+=2;
+ current_size+=2;
+ }
+ return current_size;
+ }
+
+ void skip_white_space(struct Source_File *src,char skip_new_line)
+ {
+ while(src->src[src->where_in_src]==' ' || (src->src[src->where_in_src]=='\n' && skip_new_line) || src->src[src->where_in_src]=='\t')
{
if(src->src[src->where_in_src]=='\n')
{
}
- struct token* get_next_token(struct Source_File *src,struct automata_entry *start_state)
+ struct token* get_next_token(struct Source_File *src,struct automata_entry *start_state,char skip_new_line)
{
int temp;
size_t current_size;
best_state=current_state=start_state;
/*ignore leading spaces,tabs and newlines*/
- skip_white_space(src);
+ skip_white_space(src,skip_new_line);
while(src->src[src->where_in_src]!='\0')
{
-
- if(src->where_in_src<src->src_size-1 && src->src[src->where_in_src]=='\\' && src->src[src->where_in_src+1]=='\n')
- {
- src->where_in_src+=2;
- current_size+=2;
- }
+ current_size+=skip_line_splice(src);
current_state=current_state->delta[cmpr[src->src[src->where_in_src]]];
if(current_state==NULL)
{
current_size=0;
best_state=current_state=start_state;
- skip_white_space(src);
+ skip_white_space(src,1);
}else
{
ret=malloc(sizeof(struct token));
return ret;
}
- struct token* copy_token(struct token *token)
+ /*returns a freshly malloc'd shallow copy of src; the data pointer is
+   shared with the original token — assumes tokens never own their data,
+   TODO confirm against the token allocation in get_next_token*/
+ struct token* copy_token(struct token *src)
{
-
+ struct token *cpy;
+ cpy=malloc(sizeof(struct token));
+ *cpy=*src;
+ return cpy;
}
#endif
F diff --git a/lex/lexer.h b/lex/lexer.h
--- a/lex/lexer.h
+++ b/lex/lexer.h
void lex(struct Source_File *src,struct Translation_Data *translation_data);
- struct token* get_next_token(struct Source_File *src,struct automata_entry *start_state);
- struct token* copy_token(struct token *token);
+ struct token* get_next_token(struct Source_File *src,struct automata_entry *start_state,char skip_new_line);
+ struct token* copy_token(struct token *src);
struct token_vector Lex_Queue_Condense(struct Queue *tokens);
void handle_splicing(struct token *word);
void chase_new_line(struct Source_File *src,struct Translation_Data *translation_data);
- void skip_white_space(struct Source_File *src);
+ void skip_white_space(struct Source_File *src,char skip_new_line);
+ size_t skip_line_splice(struct Source_File *src);
char check(struct Translation_Data *translation_data,enum KEYWORDS kw,size_t ahead);
F diff --git a/lex/preprocessing.c b/lex/preprocessing.c
--- a/lex/preprocessing.c
+++ b/lex/preprocessing.c
void parse_preproc_line(struct Source_File *src,struct Translation_Data *translation_data)
{
struct token *hold;
- hold=get_next_token(src,&chonky_jr[0]);
+ hold=get_next_token(src,&chonky_jr[0],0);
switch(hold->type)
{
case PKW_INCLUDE:
case PKW_DEFINE:
free(hold);
parse_define_line(src,translation_data);
+ return;
default:
return;
/*TODO error*/
void parse_include_line(struct Source_File *src,struct Translation_Data *translation_data)
{
struct token *hold;
- hold=get_next_token(src,&chonky[0]);
+ hold=get_next_token(src,&chonky[0],0);
if(hold->type==KW_STRING)
{
char *where_to_search[]={src->src_name->base,NULL};
chase_new_line(src,translation_data);
}
- /*TODO TODO*/
+ /*skipped # and 'define'*/
+ /*parses "name", optional "(id,id,...)" parameter list, and the
+   replacement token list up to end of line, then registers the macro
+   in translation_data->macros*/
void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data)
{
+ struct token *hold_token;
+ struct token *macro_name;
+ struct define_directive *new_macro;
+ struct Queue *hold_tokens;
+ size_t number_of_arguments=0;
+ int *hold_index;
+
+
+ /*the macro name must directly follow 'define'*/
+ macro_name=get_next_token(src,&chonky[0],0);
+ if(macro_name->type!=KW_ID)
+ {
+ free(macro_name);
+ push_lexing_error("expected id after #define",src,translation_data);
+ return;
+ }
+
+ new_macro=get_define_directive(macro_name);
+ /*white space*/
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token->type==KW_OPEN_NORMAL)
+ {
+ /*function-like macro: collect the parameter names, mapping each
+   name to its position index in new_macro->arguments*/
+ free(hold_token);
+ while(1)
+ {
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token->type!=KW_ID)
+ {
+ push_lexing_error("expected id in define argument list",src,translation_data);
+ free(hold_token);
+ break;
+ }
+ hold_index=malloc(sizeof(int));
+ *hold_index=number_of_arguments;
+ Map_Push(new_macro->arguments,hold_token->data,hold_token->data_size,hold_index);
+ free(hold_token);
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token->type!=KW_COMMA)
+ {
+ if(hold_token->type==KW_CLOSE_NORMAL)
+ {
+ free(hold_token);
+ break;
+ }else
+ {
+ push_lexing_error("expected ',' in define argument list",src,translation_data);
+ free(hold_token);
+ break;
+ }
+ }
+ free(hold_token);
+ ++number_of_arguments;
+ }
+ /*prime hold_token with the first replacement-list token*/
+ hold_token=get_next_token(src,&chonky[0],0);
+ /*number_of_arguments counted commas, so the parameter count is one more*/
+ make_define_argument_list(new_macro,number_of_arguments+1);
+
+ }else if(hold_token->type==KW_NOTYPE)
+ {
+ push_lexing_error("empty define directive",src,translation_data);
+ free(hold_token);
+ /*TODO destroy new define directive*/
+ /*TODO there is a memory leak here*/
+ return ;
+ }
+
+ /*push things*/
+
+ /*temporarily redirect the output queue so the replacement tokens are
+   collected into the macro body rather than the main token stream*/
+ hold_tokens=translation_data->tokens;
+ translation_data->tokens=new_macro->macro_tokens;
+ /*there is something in hold_token*/
+ do{
+ expand_macro(hold_token,src,translation_data);
+ }while( (hold_token=get_next_token(src,&chonky[0],0))->type != KW_NOTYPE);
+
+ /*removing the notype token*/
+ free(hold_token);
+
+ translation_data->tokens=hold_tokens;
+ /*push the directive into the macro map*/
+ Map_Push(translation_data->macros,macro_name->data,macro_name->data_size,new_macro);
+ free(macro_name);
+
}
/*
id[(list)] tokens \n
ret->arguments=malloc(sizeof(struct Map));
Map_Init(ret->arguments);
+ ret->number_of_arguments=0;
ret->argument_list=NULL;
return ret;
}
+ /*allocates and initialises one token queue per macro parameter;
+   must be called at most once per directive (asserted below)*/
+ void make_define_argument_list(struct define_directive* directive,size_t number_of_arguments)
+ {
+ size_t i;
+ assert(directive->number_of_arguments==0 && directive->argument_list==NULL);
+
+ directive->number_of_arguments=number_of_arguments;
+ directive->argument_list=malloc(sizeof(struct Queue)*number_of_arguments);
+
+ for(i=0;i<number_of_arguments;++i)
+ {
+ Queue_Init(directive->argument_list+i);
+ }
+ }
+
+ /*pushes a copy of every token of a collected argument onto the output
+   token queue; copies are pushed so the stored argument queue can be
+   flushed independently afterwards*/
+ void expand_macro_argument(struct Queue *replacement_tokens,struct Translation_Data *translation_data)
+ {
+ struct Queue_Node *it;
+ /*NOTE(review): iteration follows ->prev from ->first, matching the
+   queue traversal used in expand_macro — confirm against the Queue impl*/
+ for(it=replacement_tokens->first;it!=NULL;it=it->prev)
+ {
+ Queue_Push(translation_data->tokens,copy_token((struct token*)it->data));
+ }
+ }
+ /*reads the parenthesised argument list of a function-like macro
+   invocation, storing the tokens of argument i into
+   macro->argument_list[i]; on a malformed list a lexing error is pushed
+   and collection stops*/
+ void load_macro_arguments(struct define_directive *macro,struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold;
+ struct Queue *hack;
+ size_t i;
+
+ /*object-like macros take no argument list*/
+ if(macro->number_of_arguments==0)
+ return;
+
+ hold=get_next_token(src,&chonky[0],1);
+ if(hold->type!=KW_OPEN_NORMAL)
+ {
+ push_lexing_error("expected '(' in macro expansion",src,translation_data);
+ free(hold);
+ return;
+ }
+ free(hold);
+
+ /*temporarily redirect the output queue so expand_macro pushes each
+   argument's tokens into its own queue*/
+ hack=translation_data->tokens;
+ for(i=0;i<macro->number_of_arguments-1;++i)
+ {
+ translation_data->tokens=macro->argument_list+i;
+ for(
+ hold=get_next_token(src,&chonky[0],1);
+ hold->type!=KW_COMMA && hold->type!=KW_NOTYPE;
+ hold=get_next_token(src,&chonky[0],1)
+ )
+ {
+ expand_macro(hold,src,translation_data);
+ }
+ if(hold->type==KW_NOTYPE)
+ {
+ push_lexing_error("expected ',' in macro argument list",src,translation_data);
+ free(hold);
+ goto cleanup;
+ }
+ /*the ',' separator is not part of the argument; free it here
+   (it was previously leaked on the success path)*/
+ free(hold);
+
+ }
+ /*the last argument is terminated by ')' instead of ','*/
+ translation_data->tokens=macro->argument_list+i;
+ for(
+ hold=get_next_token(src,&chonky[0],1);
+ hold->type!=KW_CLOSE_NORMAL;
+ hold=get_next_token(src,&chonky[0],1)
+ )
+ {
+ if(hold->type==KW_NOTYPE)
+ {
+ push_lexing_error("expected ')' in macro argument list",src,translation_data);
+ free(hold);
+ goto cleanup;
+ }
+ expand_macro(hold,src,translation_data);
+ }
+ /*the closing ')' is not part of the argument; free it here
+   (it was previously leaked on the success path)*/
+ free(hold);
+
+ cleanup:
+ /*always restore the real output queue*/
+ translation_data->tokens=hack;
+
+
+ }
+ /*empties every per-parameter token queue after an expansion so the
+   directive can be expanded again; frees only the token structs — their
+   data pointers are shared (see copy_token)*/
+ void flush_macro_arguments(struct define_directive *macro)
+ {
+ size_t i;
+ for(i=0;i<macro->number_of_arguments;++i)
+ {
+ while(macro->argument_list[i].size>0)
+ free(Queue_Pop(macro->argument_list+i));
+ }
+ }
+ /*macro name token is freed on expansion , if it is not a macro name it is pushed into token queue*/
+ /*if macro_name names a registered macro, collects its arguments and
+   pushes the substituted replacement list onto the output token queue;
+   otherwise ownership of macro_name passes to the queue*/
+ void expand_macro(struct token* macro_name,struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct define_directive *hold=NULL;
+ struct token *hold_token;
+ int *index;
+ struct Queue_Node *it;
+
+ if(macro_name->type==KW_ID)
+ hold=Map_Check(translation_data->macros,macro_name->data,macro_name->data_size);
+ if(hold!=NULL)
+ {
+ free(macro_name);
+ load_macro_arguments(hold,src,translation_data);
+ if(translation_data->errors->size>0)
+ {
+ /*don't leave partially collected argument tokens behind —
+   they would pollute the next expansion of this macro*/
+ flush_macro_arguments(hold);
+ return;
+ }
+
+ /*walk the replacement list; parameter occurrences are substituted
+   with the collected argument tokens, everything else is copied*/
+ for(it=hold->macro_tokens->first;it!=NULL;it=it->prev)
+ {
+ hold_token=(struct token*)it->data;
+ index=Map_Check(hold->arguments,hold_token->data,hold_token->data_size);
+ if(index!=NULL)
+ {
+ expand_macro_argument(hold->argument_list+*index,translation_data);
+ }else
+ {
+ Queue_Push(translation_data->tokens,copy_token(hold_token));
+ }
+ }
+ flush_macro_arguments(hold);
+ }else
+ {
+ /*this isn't a macro, so we just push it to the token queue*/
+ Queue_Push(translation_data->tokens,macro_name);
+ }
+ }
#endif
F diff --git a/lex/preprocessing.h b/lex/preprocessing.h
--- a/lex/preprocessing.h
+++ b/lex/preprocessing.h
#include <preprocessing.hh>
#include <program.h>
#include <lexer.h>
+ #include <chonky.h>
#include <gcc_error.h>
+ #include <map.h>
struct define_directive
{
/*the tokens of the macro*/
struct Queue *macro_tokens;
+ /*ints are stored here*/
struct Map *arguments;
/*put arguments here*/
- /*an array to pointers to token queues*/
- struct Queue **argument_list;
+ /*an array of token queues*/
+ size_t number_of_arguments;
+ struct Queue *argument_list;
};
void parse_preproc_line(struct Source_File *src,struct Translation_Data *translation_data);
/*define stuff*/
struct define_directive* get_define_directive(struct token* macro_name);
- struct token* copy_token(struct token *token);
- void expand_macro_argument(struct Queue *replacement_tokens);
- void expand_macro(struct token* macro_name,struct Translation_Data *translation_data);
+ void make_define_argument_list(struct define_directive* directive,size_t number_of_arguments);
+
+ void expand_macro_argument(struct Queue *replacement_tokens,struct Translation_Data *translation_data);
+ void load_macro_arguments(struct define_directive *macro,struct Source_File *src,struct Translation_Data *translation_data);
+ void flush_macro_arguments(struct define_directive *macro);
+ void expand_macro(struct token* macro_name,struct Source_File *src,struct Translation_Data *translation_data);
void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data);
F diff --git a/misc/gcc_arguments.c b/misc/gcc_arguments.c
--- a/misc/gcc_arguments.c
+++ b/misc/gcc_arguments.c
}
}
- ret->source_names=malloc(source_names->size+1);
+ ret->source_names=malloc((source_names->size+1)*sizeof(char*) );
ret->source_names[source_names->size]=NULL;
while(source_names->size)
{
F diff --git a/misc/gcc_string.c b/misc/gcc_string.c
--- a/misc/gcc_string.c
+++ b/misc/gcc_string.c
char* gstr_append(char *lead,char *follower)
{
char *ret,*hold;
- hold=ret=malloc(gstrlen(lead) + gstrlen(follower));
+ hold=ret=malloc(gstrlen(lead) + gstrlen(follower)+1);
while(*(hold++)=*(lead++));
hold--;
while(*(hold++)=*(follower++));
F diff --git a/parse/parse_expression.c b/parse/parse_expression.c
--- a/parse/parse_expression.c
+++ b/parse/parse_expression.c
}else
{
- return (struct AST*)parse_unary_expression(translation_data,scope);
+ ret=(struct AST_Unary_Expression*)parse_expression(translation_data,scope);
+ if(get_and_check(translation_data,KW_CLOSE_NORMAL))
+ {
+ return (struct AST*)ret;
+ }else
+ {
+ return (struct AST*)get_error_tree((struct AST*)ret);
+ }
+
}
}else
{
F diff --git a/semantics/program.c b/semantics/program.c
--- a/semantics/program.c
+++ b/semantics/program.c
Queue_Init(ret->errors);
Queue_Init(ret->source_files);
+ ret->macros=malloc(sizeof(struct Map));
+ Map_Init(ret->macros);
+
return ret;
}
struct Source_Name* get_source_name(char *filename,char *base)
++i;
/*prune the filename*/
offset=gstrlen(name->filename+i);
- hold_base=malloc(offset);
+ hold_base=malloc(offset+1);
strmv(hold_base,name->filename+i);
free(name->filename);
name->filename=hold_base;
F diff --git a/semantics/program.h b/semantics/program.h
--- a/semantics/program.h
+++ b/semantics/program.h
struct Queue *tokens;
struct Queue *errors;
struct Queue *source_files;
+
+ struct Map *macros;
};
struct Program* get_program();
F diff --git a/tests/test3.c b/tests/test3.c
--- a/tests/test3.c
+++ b/tests/test3.c
+ /*exercises function-like macro definition and expansion
+   (note: macro arguments are deliberately left unparenthesized here;
+   the test only expands max with simple literal arguments)*/
+ #define max(a,b) (a>b?a:b)
int main()
{
- struct A
- {
- int a;
- }as;
- }
- void asd()
- {
-
-
-
-
- struct const; A b;
- long long long short int a;
+ return max(1,3);
}