F diff --git a/src/debug/debug_ast.c b/src/debug/debug_ast.c
--- a/src/debug/debug_ast.c
+++ b/src/debug/debug_ast.c
else
return is_valid_value(designator->value)
&&
- is_valid_token(designator->id)
+ is_valid_id(designator->id)
;
}
_Bool is_valid_unary_expression(struct AST_Unary_Expression *expression)
F diff --git a/src/debug/debug_lexer.c b/src/debug/debug_lexer.c
--- a/src/debug/debug_lexer.c
+++ b/src/debug/debug_lexer.c
{
return keyword>=0 && keyword<LEXER_TYPE_END;
}
- _Bool is_valid_token(struct identifier *token)
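+ /*a token is valid when it is non-NULL and carries a known LEXER_TYPE; identifiers are now checked separately by is_valid_id*/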
+ _Bool is_valid_token(struct token *token)
+ {
+ return token!=NULL && is_valid_keyword_enum(token->type);
+ }
+ _Bool is_valid_id(struct identifier *id)
{
return 1;
}
F diff --git a/src/debug/debug_lexer.h b/src/debug/debug_lexer.h
--- a/src/debug/debug_lexer.h
+++ b/src/debug/debug_lexer.h
#include <common.h>
_Bool is_valid_keyword_enum(enum LEXER_TYPE keyword);
- _Bool is_valid_token(struct identifier *token);
+ _Bool is_valid_token(struct token *token);
+ _Bool is_valid_id(struct identifier *id);
_Bool is_valid_automata_node(struct Automata_Node *node);
#endif
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c
--- a/src/frontend/lex/lex_preprocessing_directive.c
+++ b/src/frontend/lex/lex_preprocessing_directive.c
}
return (struct token*)ret;
}
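+ /*lexes the unary # (stringify) operator inside a macro replacement list;
+ the next token on the line must be a macro argument, which becomes the operand*/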
+ struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ {
+ struct token_hashtag_unary_operator *ret;
+ struct token *hold_token=NULL; /*stays NULL when the line ends before an operand is read*/
+ ret=wonky_malloc(sizeof(struct token_hashtag_unary_operator));
+ ret->type=PKW_HASHTAG_UNARY_OP;
+ ret->delta=get_source_location_delta(lexer_data->previous_token_location,where);
+
+ if(!preprocessing_eol(lexer_data))
+ hold_token=preprocessing_extract_next_token(lexer_data);
+ if(hold_token==NULL || hold_token->type!=PKW_MACRO_ARGUMENT)
+ {
+ push_lexing_error("Expected macro argument after #",lexer_data);
+ ret->operand=NULL;
+ return (struct token*)ret;
+ }
+ ret->operand=(struct token_functionlike_define_argument*)hold_token;
+
+ return (struct token*)ret;
+ }
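+ /*the binary ## (token paste) operator is still a stub; the PKW_HASHTAG_HASHTAG_OP token type is already in place for it*/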
+ struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ {
+ #warning this is not yet done!
+ push_lexing_error("The ## operator is not supported yet",lexer_data);
+ return NULL;
+ }
struct token* preprocessing_extract_next_directive(struct Lexer_Data *lexer_data)
{
return preprocessing_extract_next_token_inner(lexer_data,1);
do{
+
preprocessing_skip_white_space(lexer_data);
+
+
where_does_the_token_start_in_the_source_file=lexer_data->where_in_src;
+ if(preprocessing_eol(lexer_data))
+ return NULL;
+
hold_node=preprocessing_feed_automata_until_error(lexer_data);
- if(preprocessing_eol(lexer_data))
- break;
if(hold_node==NULL)
return get_error_token("Unrecognised lexical element",get_source_location(
NON_WHITE_SPACE
}state=BLANK_SPACE;
- while(state!=NON_WHITE_SPACE && !preprocessing_eol(lexer_data))
+ while(state!=NON_WHITE_SPACE && (!preprocessing_eol(lexer_data) || state==POSSIBLE_LINE_SPLICE))
{
switch(lexer_data->src->src[lexer_data->where_in_src])
{
{
return lexer_data->src->src[lexer_data->where_in_src]=='\n' || lexer_eof(lexer_data);
}
- #warning a hashtag include "asdfadf" [ space ] makes the first assert fail. This is probably due to an empty token after the string one
struct token *preprocessing_make_token_finishing_on_node(struct Lexer_Data *lexer_data,struct Automata_Node *finishing_node,size_t start_position,_Bool create_directive)
{
struct Source_Location *token_location;
struct token* preprocessing_lex_include_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
struct token_include_directive *ret;
+ struct token *hold_token;
ret=wonky_malloc(sizeof(struct token_include_directive));
ret->type=PKW_INCLUDE;
Queue_Init(ret->tokens);
+ #warning a hashtag include "asdfadf" [ space ] corrupts wonky...
+
while(!preprocessing_eol(lexer_data))
- Queue_Push(ret->tokens,preprocessing_extract_next_token(lexer_data));
+ {
+ hold_token=preprocessing_extract_next_token(lexer_data);
+ if(hold_token!=NULL)
+ Queue_Push(ret->tokens,hold_token);
+ }
return (struct token*)ret;
}
hold_token=preprocessing_extract_next_token(lexer_data);
- if(hold_token->type==KW_ID)
- {
- hold_id=((struct token_identifier*)hold_token)->id;
- }else if(token_is_keyword(hold_token))
- {
- hold_id=((struct token_keyword*)hold_token)->id;
- }else
+ if(hold_token==NULL)
{
preprocessing_goto_end_of_line(lexer_data);
/*NOTICE*/
return get_error_token("Id exprected after #define directive",where,lexer_data->previous_token_location,lexer_data->program);
+ }else if(hold_token->type==KW_ID)
+ {
+ hold_id=((struct token_identifier*)hold_token)->id;
+ }else if(token_is_keyword(hold_token))
+ {
+ hold_id=((struct token_keyword*)hold_token)->id;
+ }else
+ {
+ /*hold_id would otherwise stay uninitialised*/
+ preprocessing_goto_end_of_line(lexer_data);
+ delete_token(hold_token);
+ return get_error_token("Id expected after #define directive",where,lexer_data->previous_token_location,lexer_data->program);
}
+ #warning make lexer_data_check_char(lexer_data,CHAR_OPEN_NORMAL) so we check for '(' directly next to id
hold_hold_token=preprocessing_extract_next_token(lexer_data);
- if(hold_hold_token->type==KW_OPEN_NORMAL)
+ if(hold_hold_token==NULL)
+ {
+ ret=(struct token_normal_define_directive*)get_normal_define_directive_token(where,lexer_data->previous_token_location,hold_id);
+ delete_token((struct token*)hold_token);
+ return (struct token*)ret;
+ }else if(hold_hold_token->type==KW_OPEN_NORMAL)
{
ret=preprocessing_lex_functionlike_define_directive(lexer_data,where,hold_id);
delete_token((struct token*)hold_token);
preprocessing_parse_functionlike_macro_id_list(lexer_data,ret);
+ lexer_data->automata_view=AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE;
preprocessing_push_functionlike_macro_substitution_tokens(lexer_data,where,ret->define);
+ lexer_data->automata_view=AUTOMATA_VIEW_NORMAL;
return (struct token*)ret;
}
while(!preprocessing_eol(lexer_data))
{
hold_token=preprocessing_extract_next_token(lexer_data);
+
+ wonky_assert(hold_token!=NULL);
+
if(hold_token->type==KW_ID && ((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive==directive)
{
+ #warning left here while implementing the # and ## operators
+
Queue_Push(directive->replacement_tokens,
get_functionlike_define_directive_argument_token(
lexer_data->previous_token_location,
((struct token_identifier*)hold_token)->id->hold_functionlike_define_directive_argument));
delete_token(hold_token);
+ }else if(hold_token->type==KW_HASHTAG)
+ {
+ Queue_Push(directive->replacement_tokens,preprocessing_lex_hastag_unary_operator(lexer_data,where));
+ delete_token(hold_token); /*the '#' token itself is consumed by the operator token*/
}else
{
Queue_Push(directive->replacement_tokens,hold_token);
F diff --git a/src/frontend/lex/lex_preprocessing_directive.h b/src/frontend/lex/lex_preprocessing_directive.h
--- a/src/frontend/lex/lex_preprocessing_directive.h
+++ b/src/frontend/lex/lex_preprocessing_directive.h
struct token* preprocessing_lex_pragma_directive(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
+ struct token* preprocessing_lex_hastag_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
+ struct token* preprocessing_lex_hastag_hashtag_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
+
struct token* preprocessing_return_else_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_return_endif_token(struct Lexer_Data *lexer_data,struct Source_Location *where);
void preprocessing_parse_functionlike_macro_id_list(struct Lexer_Data *lexer_data,struct token_functionlike_define_directive *directive);
+ _Bool preprocessing_has_hit_hashtag(struct Lexer_Data *lexer_data);
+
#endif
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c
--- a/src/frontend/lex/lexer.c
+++ b/src/frontend/lex/lexer.c
case KW_HASHTAG:
if(lexer_data->automata_view==AUTOMATA_VIEW_PREPROCESSING_DIRECTIVE)
{
- wonky_assert(!"IMPLEMENT # OPERATOR IN PREPROCESSING DIRECTIVES");
+ return preprocessing_lex_hastag_unary_operator(lexer_data,token_location);
}else
{
if(!lexer_data->is_in_the_begining_of_line)
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c
--- a/src/semantics/program/translation_unit.c
+++ b/src/semantics/program/translation_unit.c
include_name=gstrn_append(include_name,"-",100);
break;
default:
- push_token_ptr_error("Unsupported symbol found inside filename in include directive with angular brackets and macro expansion",ptr);
- return;/*NOTICE*/
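+ /*keywords can legitimately appear in a macro-expanded header name, so spell them out instead of erroring*/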
+ if(token_is_keyword(hold_token))
+ {
+ include_name=gstrn_append(include_name,((struct token_keyword*)hold_token)->id->data,100);
+ }else
+ {
+ push_token_ptr_error("Unsupported symbol found inside filename in include directive with angular brackets and macro expansion",ptr);
+ return;/*NOTICE*/
+ }
}
hold_token=token_ptr_get_token_under_pointer(ptr);
}
else
token_ptr_jump_to(ptr,argument->first_in_argument_substitution_tokens,argument->number_of_substitution_tokens);
}
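+ /*expansion half of the # operator: each substitution token of the argument is
+ spelled out as text and the pieces are joined with single spaces into one
+ string token (C11 6.10.3.2)*/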
+ void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op)
+ {
+ struct token_string *hold_string_token;
+ struct token *hold_token;
+ size_t i;
+ Queue_Node *it;
+
+
+ char *hold_str;
+ size_t hold_str_size;
+
+ /*This wastes a LOT OF MEMORY. TODO make a temp token allocation scheme*/
+
+ hold_string_token=(struct token_string*)get_string_token(KW_STRING,op->delta->location,op->delta->location,"",0);
+ for(it=op->operand->argument->first_in_argument_substitution_tokens,i=0;i<op->operand->argument->number_of_substitution_tokens && it!=NULL;++i,it=it->prev)
+ {
+ hold_token=(struct token*)it->data;
+ wonky_assert(is_valid_token(hold_token));
+ if(hold_token->type==KW_ID)
+ {
+ hold_str=((struct token_identifier*)hold_token)->id->data;
+ hold_str_size=((struct token_identifier*)hold_token)->id->size;
+ }else
+ {
+ /*hack: spell the token through the generated LEXER_TYPE name table*/
+ hold_str=(char*)lexer_type_to_string_map[hold_token->type];
+ if(hold_str==NULL) /*not every LEXER_TYPE has an entry in the map*/
+ hold_str="";
+ hold_str_size=gstrnlen(hold_str,100);
+ }
+ if(i==0) /*no separating space before the first piece*/
+ hold_string_token=(struct token_string*)get_token_from_two_adjacent_strings(hold_string_token,
+ (struct token_string*)get_string_token(KW_STRING,hold_token->delta->location,hold_token->delta->location,hold_str,hold_str_size));
+ else
+ hold_string_token=(struct token_string*)get_token_from_two_strings_with_a_space_between(hold_string_token,
+ (struct token_string*)get_string_token(KW_STRING,hold_token->delta->location,hold_token->delta->location,hold_str,hold_str_size));
+ }
+
+ #warning hold_string_token is assembled here but never handed back to the token stream
+ (void)hold_string_token;
+ }
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro)
{
int open_bracket_count=1;
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h
--- a/src/semantics/program/translation_unit.h
+++ b/src/semantics/program/translation_unit.h
void token_ptr_execute_normal_macro(struct Token_Pointer *ptr,struct normal_define_directive *macro);
void token_ptr_execute_functionlike_macro(struct Token_Pointer *ptr,struct functionlike_define_directive *macro);
void token_ptr_execute_functionlike_macro_argument(struct Token_Pointer *ptr,struct functionlike_define_directive_argument *argument);
+
+ void token_ptr_execute_stringify_functionlike_macro_argument(struct Token_Pointer *ptr,struct token_hashtag_unary_operator *op);
+
+ //void token_ptr_execute_concat_functionlike_macro_arguments(struct Token_Pointer *ptr,
+
void token_ptr_load_functionlike_macro_arguments_with_tokens(struct Token_Pointer *ptr,struct functionlike_define_directive *macro);
_Bool token_ptr_do_preprocessing_stuff(struct Token_Pointer *ptr,struct token *token);
F diff --git a/src/semantics/value/constant.c b/src/semantics/value/constant.c
--- a/src/semantics/value/constant.c
+++ b/src/semantics/value/constant.c
return extract_literal_string(hold_new,first_size+second_size);
}
+ /*TODO*/
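+ /*like concatenate_string_literals, but writes a single ' ' between the two
+ string bodies; used when # glues stringified argument tokens together*/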
+ struct Constant* concatenate_string_literals_with_space_between(struct Constant *first,struct Constant *second)
+ {
+ size_t first_size;
+ size_t second_size;
+ char *hold_new;
+ first_size=get_type_size(first->type);
+ second_size=get_type_size(second->type);
+
+ hold_new=wonky_malloc(first_size+second_size+1);
+
+ gmemmove(hold_new,first->value,first_size);
+ hold_new[first_size]=' ';
+ gmemmove(hold_new+first_size+1,second->value,second_size);
+
+ return extract_literal_string(hold_new,first_size+second_size+1);
+ }
struct Constant* get_long_long_int_constant(long long number)
{
struct Constant *ret;
F diff --git a/src/semantics/value/constant.h b/src/semantics/value/constant.h
--- a/src/semantics/value/constant.h
+++ b/src/semantics/value/constant.h
struct Constant* concatenate_string_literals(struct Constant *first,struct Constant *second);
+ struct Constant* concatenate_string_literals_with_space_between(struct Constant *first,struct Constant *second);
struct Constant* get_long_long_int_constant(long long number);
F diff --git a/src/syntax/automatas/automata.h b/src/syntax/automatas/automata.h
--- a/src/syntax/automatas/automata.h
+++ b/src/syntax/automatas/automata.h
extern struct identifier file_special_identifier;
+ extern const char *lexer_type_to_string_map[LEXER_TYPE_END];
#endif
F diff --git a/src/syntax/automatas/automata.hh b/src/syntax/automatas/automata.hh
--- a/src/syntax/automatas/automata.hh
+++ b/src/syntax/automatas/automata.hh
PKW_PUNCTUATOR,
PKW_DIRECTIVE,
PKW_MACRO_ARGUMENT,
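+ /*token types for the preprocessing # (stringify) and ## (paste) operators*/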
+ PKW_HASHTAG_UNARY_OP,
+ PKW_HASHTAG_HASHTAG_OP,
LT_EOF,
LT_ERROR,
F diff --git a/src/syntax/automatas/generator/generator.c b/src/syntax/automatas/generator/generator.c
--- a/src/syntax/automatas/generator/generator.c
+++ b/src/syntax/automatas/generator/generator.c
fprintf(chonky_c,"#ifndef WONKY_CHONKY_C\n#define WONKY_CHONKY_C WONKY_CHONKY_C\n#include <stddef.h>\n#include <automata.h>\n\n");
print_automata(chonky,"chonky",chonky_c);
fprintf(chonky_c,"\nstruct Automata_Node *id_node=&chonky[%zd];\n\n",global_id_node->node_number);
+ print_lexer_type_to_string_map(chonky_c);
fprintf(chonky_c,"\n#endif");
}
void print_automata(struct Generator_Node *root,const char *name,FILE *out)
wonky_assert(node_queue->size==0);
wonky_free(node_queue);
}
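+ /*emits a LEXER_TYPE -> spelling table into the generated chonky.c; the
+ stringify code uses it to spell tokens that carry no identifier of their own*/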
+ void print_lexer_type_to_string_map(FILE *out)
+ {
+ fprintf(out,"const char *lexer_type_to_string_map[LEXER_TYPE_END]=\n\t{\n",number_of_chonky_keywords);
+ for(size_t i=0;i<number_of_chonky_keywords;++i)
+ {
+ fprintf(out,"\t\t[%s]=\"%s\",\n",chonky_keywords[i].kw_string, chonky_keywords[i].keyword);
+ }
+ fprintf(out,"\n\t};");
+ }
struct Generator_Node* make_chonky()
{
struct Generator_Node *ret;
F diff --git a/src/syntax/automatas/generator/generator.h b/src/syntax/automatas/generator/generator.h
--- a/src/syntax/automatas/generator/generator.h
+++ b/src/syntax/automatas/generator/generator.h
void print_automata(struct Generator_Node *root,const char *name,FILE *out);
void print_automata_nodes(struct Generator_Node *node,const char *name,FILE *out);
+ void print_lexer_type_to_string_map(FILE *out);
+
struct Generator_Node* make_chonky();
struct Generator_Node* make_generator(const struct Keyword_Entry *keywords,size_t number_of_keywords);
struct Generator_Node* insert_keyword(struct Generator_Node *node,const struct Keyword_Entry *entry);
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c
--- a/src/syntax/source_file.c
+++ b/src/syntax/source_file.c
.line=0,
.column=0,
.on_which_byte=0,
- .src_name=NULL,
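+ /*placeholder name - presumably keeps code that prints source names from hitting a NULL src_name*/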
+ .src_name=&(struct Source_Name){.name="TODO startoffile",.name_size=sizeof("TODO startoffile")},
};
struct Source_File* get_source_file_from_string(char *filename,size_t filename_size,struct Program *program)
{
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c
--- a/src/syntax/token/token.c
+++ b/src/syntax/token/token.c
struct token *ret;
ret=wonky_malloc(sizeof(struct token));
ret->type=LT_EOF;
- ret->delta=NULL;
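+ /*give the EOF token a real delta so token->delta->location is always safe to dereference*/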
+ ret->delta=wonky_malloc(sizeof(struct Source_Location_Delta));
+ ret->delta->line_offset=0;
+ ret->delta->column=0;
+ ret->delta->location=&start_of_file;
return ret;
}
return (struct token*)ret;
}
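+ /*joins two string tokens into one whose body has a single space between the
+ operands' bodies; the operand tokens are not freed (see the temp token allocation TODO)*/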
+ struct token* get_token_from_two_strings_with_a_space_between(struct token_string *first,struct token_string *second)
+ {
+ struct token_string *ret;
+ ret=wonky_malloc(sizeof(struct token_string));
+ ret->type=KW_STRING;
+ ret->delta=first->delta;
+ ret->constant=concatenate_string_literals_with_space_between(first->constant,second->constant);
+
+ return (struct token*)ret;
+ }
struct token* get_file_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location)
{
struct token *ret;
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h
--- a/src/syntax/token/token.h
+++ b/src/syntax/token/token.h
{
enum LEXER_TYPE type;
struct Source_Location_Delta *delta;
- struct token_functionlike_define_directive *operand;
+ struct token_functionlike_define_argument *operand;
};
- struct token_hashtag_hastag_unary_operator
+ struct token_hashtag_hastag_operator
{
enum LEXER_TYPE type;
struct Source_Location_Delta *delta;
struct token* get_error_token(const char *msg,struct Source_Location *current_location,struct Source_Location *previous_location,struct Program *program,...);
struct token* get_eof_token();
struct token* get_token_from_two_adjacent_strings(struct token_string *first,struct token_string *second);
+ struct token* get_token_from_two_strings_with_a_space_between(struct token_string *first,struct token_string *second);
struct token* get_file_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
struct token* get_date_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);
struct token* get_line_macro_token(struct Source_Location *current_location,struct Source_Location *previous_location);