F diff --git a/build/cmake/libs/innards.txt b/build/cmake/libs/innards.txt
--- a/build/cmake/libs/innards.txt
+++ b/build/cmake/libs/innards.txt
src/syntax/source_file.c
src/syntax/token/token.c
src/syntax/automatas/automata.c
+ src/semantics/program/translation_unit.c
)
F diff --git a/src/backend/text/print/print.c b/src/backend/text/print/print.c
--- a/src/backend/text/print/print.c
+++ b/src/backend/text/print/print.c
wonky_assert(SHOULD_NOT_REACH_HERE);
}
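+ /* Appends the identifier's spelling to the last line of the print buffer */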
+ void print_id(struct Compile_Data_Print *compile_data,struct identifier *id)
+ {
+ append_to_last_line(id->data,compile_data->lines);
+ }
#undef TOK
#undef INDENT
F diff --git a/src/debug/debug_lexer.c b/src/debug/debug_lexer.c
--- a/src/debug/debug_lexer.c
+++ b/src/debug/debug_lexer.c
{
return 1;
}
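+ /* Debug stub: currently accepts every automata node as valid */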
+ _Bool is_valid_automata_node(struct Automata_Node *node)
+ {
+ return 1;
+ }
#endif
F diff --git a/src/environment/error/gcc_error.c b/src/environment/error/gcc_error.c
--- a/src/environment/error/gcc_error.c
+++ b/src/environment/error/gcc_error.c
while(errors->size>0)
fputs(Queue_Pop(errors),out);
}
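+ /* Prints a formatted message to stderr and aborts the process */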
+ void hard_error(char *format,...)
+ {
+ va_list args;
+
+ va_start(args,format);
+
+ vfprintf(stderr,format,args);
+ va_end(args);
+
+ abort();
+ }
#endif
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c
--- a/src/frontend/lex/lex_preprocessing_directive.c
+++ b/src/frontend/lex/lex_preprocessing_directive.c
return get_error_token("PREPROCESSING DIRECTIVES NOT DONE",where,lexer_data->program);
}
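+ /* Placeholder: the 'defined' unary operator is not handled yet, so an error token is produced */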
+ struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ {
+ return get_error_token("PREPROCESSING DIRECTIVES NOT DONE",where,lexer_data->program);
+ }
#endif
F diff --git a/src/frontend/lex/lex_preprocessing_directive.h b/src/frontend/lex/lex_preprocessing_directive.h
--- a/src/frontend/lex/lex_preprocessing_directive.h
+++ b/src/frontend/lex/lex_preprocessing_directive.h
struct token* preprocessing_lex_line_directive(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_lex_error_directive(struct Lexer_Data *lexer_data,struct Source_Location *where);
struct token* preprocessing_lex_pragma_directive(struct Lexer_Data *lexer_data,struct Source_Location *where);
+ struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where);
#endif
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c
--- a/src/frontend/lex/lexer.c
+++ b/src/frontend/lex/lexer.c
wonky_assert(SHOULD_NOT_REACH_HERE);
}
-
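+ /* Thin wrapper that forwards to the preprocessing lexer */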
+ struct token *lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
+ {
+ return preprocessing_lex_defined_unary_operator(lexer_data,where);
+ }
struct token *lex_preprocessing_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
if(lexer_eof(lexer_data))
return preprocessing_lex_directive(lexer_data,where);
}
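+ /* Releases only the Lexer_Data structure itself; its source and program are presumably owned elsewhere */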
+ void delete_lexer_data(struct Lexer_Data *lexer_data)
+ {
+ wonky_free(lexer_data);
+ }
#endif
F diff --git a/src/semantics/program/program.c b/src/semantics/program/program.c
--- a/src/semantics/program/program.c
+++ b/src/semantics/program/program.c
}
}
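+ /* Pulls the next token through the translation unit's token pointer */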
+ struct token* get_next_token(struct Translation_Data *translation_data)
+ {
+ return token_ptr_get_token_under_pointer(translation_data->token_pointer);
+ }
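+ /* True once the token pointer has no tokens left to give */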
+ _Bool translation_eof(struct Translation_Data *translation_data)
+ {
+ return !token_ptr_has_remaining_tokens(translation_data->token_pointer);
+ }
#endif
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c
--- a/src/semantics/program/translation_unit.c
+++ b/src/semantics/program/translation_unit.c
struct token* token_ptr_get_token_under_pointer(struct Token_Pointer *token_pointer)
{
struct token *hold_token;
+ if(token_pointer->current_token_node==NULL)
+ return get_eof_token();
+
+ token_ptr_goto_next_normal_token(token_pointer);
+
+ /* the skip above may have exhausted the token stream */
+ if(token_pointer->current_token_node==NULL)
+ return get_eof_token();
+
+ hold_token=(struct token*)token_pointer->current_token_node->data;
+
+ token_ptr_goto_next_token(token_pointer);
+
+ return hold_token;
+ }
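+ /* Advances past preprocessing directive tokens, executing each directive, until a normal token is reached; current_token_node becomes NULL when the stream is exhausted */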
+ void token_ptr_goto_next_normal_token(struct Token_Pointer *token_pointer)
+ {
+ struct token *hold_token;
+
+ if(token_pointer->current_token_node==NULL)
+ return;
while(1)
{
hold_token=(struct token*)token_pointer->current_token_node->data;
break;
case PKW_FUNCTIONLIKE_DEFINE:
token_ptr_execute_functionlike_define_directive(token_pointer,(struct token_functionlike_define_directive*)hold_token);
- break
+ break;
case PKW_UNDEF:
token_ptr_execute_undef_directive(token_pointer,(struct token_undef_directive*)hold_token);
break;
token_pointer->current_token_node=Stack_Pop(token_pointer->call_stack);
}else
{
- return hold_token;
+ token_pointer->current_token_node=NULL;
+ return;
}
break;
default:
- token_ptr_goto_next_token(token_pointer);
- return hold_token;
+ return;
}
}
wonky_assert(SHOULD_NOT_REACH_HERE);
}
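+ /* Peeks at the next normal token without consuming it; yields the eof token when none remain */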
+ struct token* token_ptr_check_next_normal_token(struct Token_Pointer *token_pointer)
+ {
+ token_ptr_goto_next_normal_token(token_pointer);
+ if(token_pointer->current_token_node==NULL)
+ return get_eof_token();
+ else
+ return (struct token*)token_pointer->current_token_node->data;
+ }
void token_ptr_goto_next_token(struct Token_Pointer *token_pointer)
{
wonky_assert(token_pointer!=NULL);
ret->current_token_node=unit->tokens->first;
ret->call_stack=wonky_malloc(sizeof(struct Stack));
Stack_Init(ret->call_stack);
- ret->location=NULL/*todo*/
+ ret->current_location=NULL;/*todo*/
+ ret->ungeted_tokens=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(ret->ungeted_tokens);/*assumption: Queue_Init mirrors Stack_Init above*/
}
void token_ptr_execute_include_directive(struct Token_Pointer *ptr,struct token_include_directive *include_directive)
token_ptr_goto_next_token(ptr);
}
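+ /* Frees only the Token_Pointer itself; the token list is presumably owned by its translation unit */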
+ void delete_token_ptr(struct Token_Pointer *ptr)
+ {
+ wonky_free(ptr);
+ }
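+ /* Skips ahead to the next normal token and reports whether one exists */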
+ _Bool token_ptr_has_remaining_tokens(struct Token_Pointer *ptr)
+ {
+ token_ptr_goto_next_normal_token(ptr);
+
+ return ptr->current_token_node!=NULL;
+ }
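+ /* Stores a token in the unget queue for later re-reading */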
+ void token_ptr_unget_token(struct Token_Pointer *ptr,struct token *token)
+ {
+ Queue_Push(ptr->ungeted_tokens,token);
+ }
#endif
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h
--- a/src/semantics/program/translation_unit.h
+++ b/src/semantics/program/translation_unit.h
struct Stack *call_stack;
struct Source_Location *current_location;
struct Program *program;
+ struct Queue *ungeted_tokens;
};
struct Preprocessing_Translation_Unit* get_preprocessing_translation_unit(struct Source_File *source);
void delete_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit);
struct token* token_ptr_get_token_under_pointer(struct Token_Pointer *token_pointer);
struct token* token_ptr_check_next_normal_token(struct Token_Pointer *token_pointer);
void token_ptr_goto_next_token(struct Token_Pointer *token_pointer);
+ void token_ptr_goto_next_normal_token(struct Token_Pointer *token_pointer);
struct Token_Pointer* get_token_ptr(struct Preprocessing_Translation_Unit *unit);
void delete_token_ptr(struct Token_Pointer *ptr);
F diff --git a/src/semantics/program/translation_unit.hh b/src/semantics/program/translation_unit.hh
--- a/src/semantics/program/translation_unit.hh
+++ b/src/semantics/program/translation_unit.hh
#ifndef WONKY_TRANSLATION_UNIT_HH
#define WONKY_TRANSLATION_UNIT_HH WONKY_TRANSLATION_UNIT_HH
+ struct Preprocessing_Translation_Unit;
+ struct Token_Pointer;
#endif
F diff --git a/src/semantics/value/constant.c b/src/semantics/value/constant.c
--- a/src/semantics/value/constant.c
+++ b/src/semantics/value/constant.c
wonky_free(constant);
}
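+ /* Builds an integer constant from a decimal digit string, honouring the requested signedness and constraint */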
+ struct Constant* extract_literal_integer_dec(char *data,size_t data_size,enum Type_Constraint constraint,enum Type_Signedness sign)
+ {
+ unsigned long long int cache=0;
+ unsigned long long int *ret_component;
+ struct Constant *ret;
+ size_t i;
+
+ ret_component=wonky_malloc(sizeof(unsigned long long int));
+ ret=wonky_malloc(sizeof(struct Constant));
+
+ for(i=0;i<data_size;++i)
+ cache*=10 , cache+=data[i]-'0';
+
+ *ret_component=cache;
+ ret->value=ret_component;
+ ret->type=(struct Type*)get_type_insecure(TS_INT,sign,constraint,INT_SIZE);
+
+ return ret;
+ }
struct Constant* extract_literal_integer_octal(char *data,size_t data_size,enum Type_Constraint constraint,enum Type_Signedness sign)
{
unsigned long long int cache=0;
return ret;
}
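+ /* Placeholder: yields a double constant fixed at 0.0; the decimal digits are not parsed yet */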
+ struct Constant* extract_literal_float_dec(char *data,size_t data_size)
+ {
+ long double *ret_component;
+ struct Constant *ret;
+
+ ret_component=wonky_malloc(sizeof(long double));
+ ret=wonky_malloc(sizeof(struct Constant));
+
+ *ret_component=.0l;
+ ret->value=ret_component;
+ ret->type=(struct Type*)get_type_insecure(TS_DOUBLE,TSIGN_NONE,TC_NONE,DOUBLE_SIZE);
+
+ return ret;
+ }
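+ /* Placeholder: yields a double constant fixed at 0.0; the hexadecimal digits are not parsed yet */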
+ struct Constant* extract_literal_float_hex(char *data,size_t data_size)
+ {
+ long double *ret_component;
+ struct Constant *ret;
+
+ ret_component=wonky_malloc(sizeof(long double));
+ ret=wonky_malloc(sizeof(struct Constant));
+ *ret_component=.0l;
+ ret->value=ret_component;
+ ret->type=(struct Type*)get_type_insecure(TS_DOUBLE,TSIGN_NONE,TC_NONE,DOUBLE_SIZE);
+
+ return ret;
+ }
struct Constant* extract_literal_char(char *data,size_t data_size)
{
F diff --git a/src/syntax/automatas/automata.hh b/src/syntax/automatas/automata.hh
--- a/src/syntax/automatas/automata.hh
+++ b/src/syntax/automatas/automata.hh
PKW_COMMENT,
PKW_NOTYPE,
LT_EOF,
+ LT_ERROR,
LEXER_TYPE_END
};
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c
--- a/src/syntax/source_file.c
+++ b/src/syntax/source_file.c
}
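+ /* Allocates a Source_Location describing a position within the named source */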
+ struct Source_Location* get_source_location(size_t line,size_t column,size_t on_which_byte,struct Source_Name *src_name)
+ {
+ struct Source_Location *ret;
+
+ ret=wonky_malloc(sizeof(struct Source_Location));
+ ret->line=line;
+ ret->column=column;
+ ret->on_which_byte=on_which_byte;
+ ret->src_name=src_name;
+
+ return ret;
+ }
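+ /* Frees only the Source_File wrapper; its contents are presumably released elsewhere */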
+ void delete_source_file(struct Source_File *src)
+ {
+ wonky_free(src);
+ }
#endif
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c
--- a/src/syntax/token/token.c
+++ b/src/syntax/token/token.c
struct token* get_error_directive_token(struct Source_Location *location,struct token_string *error_message);
struct token* get_pragma_directive(struct Source_Location *location,enum Pragma_Type type);
struct token* get_defined_unary_operator(struct Source_Location *location,struct identifier *id);
+
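+ /* Wraps a formatted translation message in an error token at the given location */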
+ struct token* get_error_token(const char *msg,struct Source_Location *location,struct Program *program,...)
+ {
+ struct token_error *ret;
+ va_list args;
+
+ va_start(args,program);
+
+ ret=wonky_malloc(sizeof(struct token_error));
+ ret->type=LT_ERROR;
+ ret->location=location;
+ ret->error=get_translation_message(msg,program,location,args);
+ va_end(args);
+
+ return (struct token*)ret;
+ }
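+ /* Builds a bare token marking the end of the token stream */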
+ struct token* get_eof_token()
+ {
+ struct token *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->type=LT_EOF;
+
+ return ret;
+ }
#endif
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h
--- a/src/syntax/token/token.h
+++ b/src/syntax/token/token.h
struct token* get_defined_unary_operator(struct Source_Location *location,struct identifier *id);
struct token* get_hashtag_unary_operator(struct Source_Location *location,struct token_functionlike_define_directive *operand);
struct token* get_hashtag_hashtag_unary_operator(struct Source_Location *location,struct Queue *operands);
- struct token* get_error_token(const char *msg,struct Source_Location *location,struct Program *program, ...);
+ struct token* get_error_token(const char *msg,struct Source_Location *location,struct Program *program,...);
struct token* get_eof_token();
#endif