WONKY



F diff --git a/build/cmake/libs/innards.txt b/build/cmake/libs/innards.txt --- a/build/cmake/libs/innards.txt +++ b/build/cmake/libs/innards.txt
src/semantics/value/value.c
src/syntax/identifier/identifier.c
src/syntax/source_file.c
+ src/syntax/macro.c
src/syntax/token/token.c
src/syntax/automatas/automata.c
src/syntax/automatas/id_node.c
F diff --git a/src/backend/asm/intel/intel_asm.c b/src/backend/asm/intel/intel_asm.c --- a/src/backend/asm/intel/intel_asm.c +++ b/src/backend/asm/intel/intel_asm.c
wonky_assert(string->type->specifier==TS_ARRAY && string->value!=NULL);
string_type=(struct Type_Array*)string->type;
- db=get_intel_asm_define_bytes(string->value,string_type->size-2);
+ db=get_intel_asm_define_bytes(string->value,string_type->size);
db2=get_intel_asm_define_bytes("\0",1);
label=get_intel_asm_new_unique_label(compile_data);
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c --- a/src/frontend/lex/lex_preprocessing_directive.c +++ b/src/frontend/lex/lex_preprocessing_directive.c
struct token* preprocessing_lex_directive(struct Lexer_Data *lexer_data,struct Source_Location *where)
{
- return preprocessing_extract_next_token(lexer_data);
+ struct token *ret;
+
+ ret=preprocessing_extract_next_token(lexer_data);
+
+ if(ret==NULL)
+ return get_error_token("PREPROCESSING EMPTY DIRECTIVE NOT SUPPORTED",where,lexer_data->program);
+
+ switch(ret->type)
+ {
+ case LT_ERROR:
+ return ret;
+ default:
+ return get_error_token("PREPROCESSING DIRECTIVE NOT RECOGNIZED",where,lexer_data->program);
+ }
+
+ return ret;
}
struct token* preprocessing_lex_defined_unary_operator(struct Lexer_Data *lexer_data,struct Source_Location *where)
hold_node=preprocessing_feed_automata_until_error(lexer_data);
if(preprocessing_eol(lexer_data))
- return NULL;
+ break;
if(hold_node==NULL)
return get_error_token("Unrecognised lexical element",get_source_location(
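Note on the new preprocessing_lex_directive body: as written it distinguishes only three outcomes, no token after the # (presumably a NULL return from preprocessing_extract_next_token), an error token, and everything else, which currently falls through to the default arm. Illustrative inputs, with the presumed diagnostics in the comments (hypothetical source, not from the repository):

    #                /* nothing follows the '#': reported as
                        "PREPROCESSING EMPTY DIRECTIVE NOT SUPPORTED"          */
    #frobnicate 1    /* any other directive currently reaches the default case:
                        "PREPROCESSING DIRECTIVE NOT RECOGNIZED"               */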
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c --- a/src/frontend/lex/lexer.c +++ b/src/frontend/lex/lexer.c
case KW_STRING:
case KW_WIDE_STRING:
- return get_string_token(finishing_node->keyword,token_location,lexer_data->src->src+start_position,lexer_data->where_in_src-start_position);
+ return get_string_token(finishing_node->keyword,token_location,lexer_data->src->src+start_position+1,lexer_data->where_in_src-start_position-2);
+ case PKW_FILE_MACRO:
+ return get_file_macro_token(token_location);
+ case PKW_DATE_MACRO:
+ return get_date_macro_token(token_location);
+ case PKW_LINE_MACRO:
+ return get_line_macro_token(token_location);
+ case PKW_STDC_MACRO:
+ return get_stdc_macro_token(token_location);
+ case PKW_STDC_HOSTED_MACRO:
+ return get_stdc_hosted_macro_token(token_location);
+ case PKW_STDC_VERSION_MACRO:
+ return get_stdc_version_macro_token(token_location);
+ case PKW_TIME_MACRO:
+ return get_time_macro_token(token_location);
case PKW_IF:
case PKW_IFDEF:
case PKW_IFNDEF:
}
return get_id_token(finishing_node->data,token_location);
}
+ default:
+ return get_error_token("Unexpected token",token_location,lexer_data->program);
}
wonky_assert(SHOULD_NOT_REACH_HERE);
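The +1/-2 arithmetic in the string case above strips the surrounding quotes from the lexeme before the token is built, matching the corresponding change in constant.c below where gstrncpy no longer skips them. A minimal sketch of the same index arithmetic with plain C types in place of the lexer structs (start_position is the index of the opening quote, where_in_src is one past the closing quote):

    #include <stdio.h>
    #include <stddef.h>

    int main(void)
    {
        const char *src = "x = \"abc\";";
        size_t start_position = 4;                       /* opening '"'              */
        size_t where_in_src = 9;                         /* one past the closing '"' */
        const char *data = src + start_position + 1;     /* skip the opening quote   */
        size_t size = where_in_src - start_position - 2; /* drop both quotes         */
        printf("%.*s\n", (int)size, data);               /* prints: abc              */
        return 0;
    }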
F diff --git a/src/semantics/program/program.c b/src/semantics/program/program.c --- a/src/semantics/program/program.c +++ b/src/semantics/program/program.c
+
ret->external_linkage=get_linkage();
Queue_Init(ret->translation_units);
F diff --git a/src/semantics/program/program.h b/src/semantics/program/program.h --- a/src/semantics/program/program.h +++ b/src/semantics/program/program.h
/*ASTs*/
struct Queue *functions_without_a_definition;
struct Queue *external_objects_without_an_initialiser;
+
};
struct Translation_Data
{
F diff --git a/src/semantics/program/translation_unit.c b/src/semantics/program/translation_unit.c --- a/src/semantics/program/translation_unit.c +++ b/src/semantics/program/translation_unit.c
struct token* token_ptr_get_token_under_pointer(struct Token_Pointer *token_pointer)
{
struct token *hold_token;
+ struct token *hold_hold_token;
+
+ hold_token=token_ptr_get_token_under_pointer_inner(token_pointer);
+
+ if(hold_token->type==KW_STRING)
+ {
+ while(token_ptr_check_next_normal_token(token_pointer)->type==KW_STRING)
+ {
+ hold_hold_token=hold_token;
+ hold_token=token_ptr_get_token_under_pointer_inner(token_pointer);
+ }
+ }
+
+ return hold_token;
+ }
+ struct token* token_ptr_get_token_under_pointer_inner(struct Token_Pointer *token_pointer)
+ {
+ struct token *hold_token;
if(!token_ptr_has_remaining_tokens(token_pointer))
return get_eof_token();
return token_ptr_get_buffered_token(token_pointer);
token_ptr_goto_next_normal_token(token_pointer);
+
+ if(token_ptr_has_buffered_tokens(token_pointer)) /*A special macro might buffer a token*/
+ return token_ptr_get_buffered_token(token_pointer);
+
hold_token=(struct token*)token_pointer->current_token_node->data;
token_ptr_goto_next_token(token_pointer);
case PKW_PRAGMA:
token_ptr_execute_pragma_directive(token_pointer,(struct token_pragma_directive*)hold_token);
break;
+ case PKW_FILE_MACRO:
+ token_ptr_execute_file_special_macro(token_pointer);
+ return;/*NOTICE*/
+ case PKW_LINE_MACRO:
+ token_ptr_execute_line_special_macro(token_pointer);
+ return;/*NOTICE*/
+ case PKW_STDC_MACRO:
+ token_ptr_execute_stdc_special_macro(token_pointer);
+ return;/*NOTICE*/
+ case PKW_STDC_HOSTED_MACRO:
+ token_ptr_execute_stdc_hosted_special_macro(token_pointer);
+ return;/*NOTICE*/
+ case PKW_STDC_VERSION_MACRO:
+ token_ptr_execute_stdc_version_special_macro(token_pointer);
+ return;/*NOTICE*/
case LT_EOF:
if(token_pointer->call_stack->size>0)
{
{
token_ptr_goto_next_token(ptr);
}
+ void token_ptr_execute_file_special_macro(struct Token_Pointer *ptr)
+ {
+ struct token_string *file;
+ token_ptr_goto_next_token(ptr);
+ file=(struct token_string*)get_string_token(KW_STRING,ptr->current_location,ptr->current_location->src_name->full_name,ptr->current_location->src_name->full_name_size);
+ Queue_Push(ptr->ungeted_tokens,file);
+ }
+ void token_ptr_execute_line_special_macro(struct Token_Pointer *ptr)
+ {
+ struct token_constant *line;
+ token_ptr_goto_next_token(ptr);
+
+ line=(struct token_constant*)get_constant_long_long_int_token(ptr->current_location,ptr->current_location->line+1);
+ Queue_Push(ptr->ungeted_tokens,line);
+ }
+ void token_ptr_execute_stdc_special_macro(struct Token_Pointer *ptr)
+ {
+ struct token_constant *line;
+ token_ptr_goto_next_token(ptr);
+
+ line=(struct token_constant*)get_constant_token(KW_DECIMAL_CONSTANT,ptr->current_location,"1",1);
+ Queue_Push(ptr->ungeted_tokens,line);
+ }
+ void token_ptr_execute_stdc_hosted_special_macro(struct Token_Pointer *ptr)
+ {
+ struct token_constant *line;
+ token_ptr_goto_next_token(ptr);
+
+ line=(struct token_constant*)get_constant_token(KW_DECIMAL_CONSTANT,ptr->current_location,"1",1);
+ Queue_Push(ptr->ungeted_tokens,line);
+ }
+ void token_ptr_execute_stdc_version_special_macro(struct Token_Pointer *ptr)
+ {
+ struct token_constant *line;
+ token_ptr_goto_next_token(ptr);
+
+ line=(struct token_constant*)get_constant_token(KW_LONG_DECIMAL_CONSTANT,ptr->current_location,"199901",sizeof("199901")-1);
+ Queue_Push(ptr->ungeted_tokens,line);
+ }
void delete_token_ptr(struct Token_Pointer *ptr)
{
wonky_free(ptr);
return 1;
}else
{
- token_ptr_goto_next_normal_token(ptr);
-
return ptr->current_token_node!=NULL;
}
}
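The KW_STRING loop in token_ptr_get_token_under_pointer collects runs of adjacent string-literal tokens, the hook for C's translation phase 6 in which neighbouring literals are merged into one (the merge itself is still a stub: concatenate_string_literals in constant.c below just returns its first argument for now), and the *_special_macro helpers expand a predefined macro by pushing its value onto the ungeted_tokens queue so the next read returns it. The observable behaviour being targeted is plain standard C:

    #include <stdio.h>

    int main(void)
    {
        const char *s = "Hello, " "world";   /* adjacent literals merge into "Hello, world" */
        printf("%s\n", s);
        printf("%s line %d\n", __FILE__, __LINE__);  /* a string literal and an int constant */
        printf("%d %d %ld\n", __STDC__, __STDC_HOSTED__, __STDC_VERSION__);
                                    /* 1, 1 (on a hosted implementation), 199901L under C99  */
        return 0;
    }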
F diff --git a/src/semantics/program/translation_unit.h b/src/semantics/program/translation_unit.h --- a/src/semantics/program/translation_unit.h +++ b/src/semantics/program/translation_unit.h
void push_token_into_preprocessing_translation_unit(struct Preprocessing_Translation_Unit *unit,struct token *token);
struct token* token_ptr_get_token_under_pointer(struct Token_Pointer *token_pointer);
+ struct token* token_ptr_get_token_under_pointer_inner(struct Token_Pointer *token_pointer);
struct token* token_ptr_check_next_normal_token(struct Token_Pointer *token_pointer);
void token_ptr_goto_next_token(struct Token_Pointer *token_pointer);
void token_ptr_goto_next_normal_token(struct Token_Pointer *token_pointer);
void token_ptr_execute_error_directive(struct Token_Pointer *ptr,struct token_error_directive *error_directive);
void token_ptr_execute_pragma_directive(struct Token_Pointer *ptr,struct token_pragma_directive *pragma_directive);
void token_ptr_execute_defined_unary_operator(struct Token_Pointer *ptr,struct token_defined_unary_operator *operator);
+ void token_ptr_execute_file_special_macro(struct Token_Pointer *ptr);
+ void token_ptr_execute_line_special_macro(struct Token_Pointer *ptr);
+ void token_ptr_execute_stdc_special_macro(struct Token_Pointer *ptr);
+ void token_ptr_execute_stdc_hosted_special_macro(struct Token_Pointer *ptr);
+ void token_ptr_execute_stdc_version_special_macro(struct Token_Pointer *ptr);
void token_ptr_assume_location_of_token(struct Token_Pointer *ptr,struct token *token);
struct token* token_ptr_get_buffered_token(struct Token_Pointer *ptr);
struct token* token_ptr_check_buffered_token(struct Token_Pointer *ptr);
+ void token_ptr_jump_to(struct Token_Pointer *ptr,struct Queue_Node *where_to);
+
#endif
F diff --git a/src/semantics/value/constant.c b/src/semantics/value/constant.c --- a/src/semantics/value/constant.c +++ b/src/semantics/value/constant.c
ret=wonky_malloc(sizeof(struct Constant));
- ret_component=gstrncpy(data+1,data_size-2);
+ ret_component=gstrncpy(data,data_size);
ret->value=ret_component;
ret->type=(struct Type*)get_type_insecure(TS_CHAR,TSIGN_NONE,TC_NONE,CHAR_SIZE);
ret=wonky_malloc(sizeof(struct Constant));
- ret_component=gstrncpy(data+1,data_size-2);
+ ret_component=gstrncpy(data,data_size);
ret->value=ret_component;
ret->type=(struct Type*)get_type_insecure(TS_CHAR,TSIGN_NONE,TC_NONE,CHAR_SIZE);
return ret;
}
+ /*TODO*/
+ struct Constant* concatenate_string_literals(struct Constant *first,struct Constant *second)
+ {
+
+ return first;
+
+ }
+ struct Constant* get_long_long_int_constant(long long number)
+ {
+ struct Constant *ret;
+ ret=wonky_malloc(sizeof(struct Constant));
+ ret->type=(struct Type*)get_type_insecure(TS_INT,TSIGN_NONE,TC_LONG_LONG,INT_SIZE);
+ ret->value=wonky_malloc(sizeof(number));
+ *(long long *)ret->value = number;
+
+ return ret;
+ }
#endif
F diff --git a/src/semantics/value/constant.h b/src/semantics/value/constant.h --- a/src/semantics/value/constant.h +++ b/src/semantics/value/constant.h
struct Constant* extract_literal_wide_string(char *data,size_t data_size);
+ struct Constant* concatenate_string_literals(struct Constant *first,struct Constant *second);
+ struct Constant* get_long_long_int_constant(long long number);
struct Constant* get_unsigned_int_constant();
struct Constant* get_signed_int_constant();
F diff --git a/src/syntax/automatas/automata.c b/src/syntax/automatas/automata.c --- a/src/syntax/automatas/automata.c +++ b/src/syntax/automatas/automata.c
.last_defined_macro_with_this_id=(struct token*)&(struct token_defined_unary_operator){.type=PKW_DEFINE},
.last_use_as_a_macro_argument=NULL,
};
-
enum Source_Chars get_ch(const char *str,size_t limit)
{
return compress[*str];
F diff --git a/src/syntax/automatas/automata.h b/src/syntax/automatas/automata.h --- a/src/syntax/automatas/automata.h +++ b/src/syntax/automatas/automata.h
extern struct Automata_Node chonky_jr[];
extern struct identifier defined_special_identifier;
+ extern struct identifier file_special_identifier;
+
#endif
F diff --git a/src/syntax/automatas/automata.hh b/src/syntax/automatas/automata.hh --- a/src/syntax/automatas/automata.hh +++ b/src/syntax/automatas/automata.hh
PKW_PRAGMA,
PKW_COMMENT,
PKW_NOTYPE,
+ PKW_FILE_MACRO,
+ PKW_DATE_MACRO,
+ PKW_LINE_MACRO,
+ PKW_STDC_MACRO,
+ PKW_STDC_HOSTED_MACRO,
+ PKW_STDC_VERSION_MACRO,
+ PKW_TIME_MACRO,
LT_EOF,
LT_ERROR,
F diff --git a/src/syntax/automatas/generator/generator.c b/src/syntax/automatas/generator/generator.c --- a/src/syntax/automatas/generator/generator.c +++ b/src/syntax/automatas/generator/generator.c
add_finishing_float_nodes(ret,0);
add_number_nodes(ret);
add_string_char_nodes(ret);
+ add_comment_nodes(ret);
add_id_nodes(ret);
return ret;
add_integer_suffix(add_hexadecimal_number_nodes(oct_hex_split),"KW_LONG_HEXADECIMAL_CONSTANT","KW_LONG_LONG_HEXADECIMAL_CONSTANT");
add_integer_suffix(add_octal_number_nodes(oct_hex_split),"KW_LONG_OCTAL_CONSTANT","KW_LONG_LONG_OCTAL_CONSTANT");
}
+ void add_comment_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *base;
+ struct Generator_Node *star;
+ struct Generator_Node *star_possible_finish;
+ struct Generator_Node *star_finish;
+ struct Generator_Node *double_slash;
+ struct Generator_Node *double_slash_end;
+
+ wonky_assert(node!=NULL);
+
+ base=get_delta_of(node,CHAR_FORWARD_SLASH);
+
+ wonky_assert(base!=NULL);
+
+ star=get_delta_of(base,CHAR_STAR);
+ connect_node(star,star,NULL,CHAR_NONE,CHAR_STAR-1,0);
+ connect_node(star,star,NULL,CHAR_STAR+1,CHAR_ENUM_END,1);
+ star_possible_finish=get_delta_of(star,CHAR_STAR);
+ connect_node(star_possible_finish,star,NULL,CHAR_NONE,CHAR_FORWARD_SLASH-1,0);
+ connect_node(star_possible_finish,star,NULL,CHAR_FORWARD_SLASH+1,CHAR_ENUM_END,0);
+ star_finish=get_delta_of(star_possible_finish,CHAR_FORWARD_SLASH);
+ star_finish->data_string="NULL";
+ star_finish->kw_string="KW_COMMENT";
+ star_finish->pkw_string="KW_COMMENT";
+ star_finish->action_string="AUTOMATA_ACTION_NO_ACTION";
+
+ double_slash=get_delta_of(base,CHAR_FORWARD_SLASH);
+ connect_node(double_slash,double_slash,NULL,CHAR_NONE,CHAR_FORM_FEED_TAB-1,0);
+ connect_node(double_slash,double_slash,NULL,CHAR_FORM_FEED_TAB+1,CHAR_ENUM_END,0);
+ double_slash->data_string="NULL";
+ double_slash->kw_string="KW_COMMENT";
+ double_slash->pkw_string="KW_COMMENT";
+ double_slash->action_string="AUTOMATA_ACTION_NO_ACTION";
+
+ double_slash_end=get_delta_of(double_slash,CHAR_FORM_FEED_TAB);
+ double_slash_end->data_string="NULL";
+ double_slash_end->kw_string="KW_COMMENT";
+ double_slash_end->pkw_string="KW_COMMENT";
+ double_slash_end->action_string="AUTOMATA_ACTION_NO_ACTION";
+
+
+
+ }
+ struct Generator_Node* get_delta_of(struct Generator_Node *node,enum Source_Chars ch)
+ {
+ if(node->output.delta[ch]!=NULL)
+ {
+ return (struct Generator_Node*)node->output.delta[ch];
+ }else
+ {
+ return (struct Generator_Node*)(node->output.delta[ch]=(struct Automata_Node*)get_generator_node(null_str,"KW_NOTYPE","KW_NOTYPE",automata_no_action_str));
+ }
+ }
struct Generator_Node* add_decimal_number_nodes(struct Generator_Node *node)
{
struct Generator_Node *decimal_node;
connect_node(node,inner_char_node,NULL,CHAR_SINGLE_QUOTE,CHAR_SINGLE_QUOTE,0);
connect_node(inner_char_node,ending_char_node,NULL,CHAR_SINGLE_QUOTE,CHAR_SINGLE_QUOTE,0);
- connect_node(inner_char_node,inner_char_node,NULL,0,CHAR_ENUM_END,0);
+ connect_node(inner_char_node,inner_char_node,NULL,0,CHAR_FORM_FEED_TAB-1,0);
+ connect_node(inner_char_node,inner_char_node,NULL,CHAR_FORM_FEED_TAB+1,CHAR_ENUM_END,0);
connect_node(node,inner_string_node,NULL,CHAR_DOUBLE_QUOTE,CHAR_DOUBLE_QUOTE,0);
connect_node(inner_string_node,ending_string_node,NULL,CHAR_DOUBLE_QUOTE,CHAR_DOUBLE_QUOTE,0);
- connect_node(inner_string_node,inner_string_node,NULL,0,CHAR_ENUM_END,0);
+ connect_node(inner_string_node,inner_string_node,NULL,0,CHAR_FORM_FEED_TAB-1,0);
+ connect_node(inner_string_node,inner_string_node,NULL,CHAR_FORM_FEED_TAB+1,CHAR_ENUM_END,0);
}
void add_finishing_float_nodes(struct Generator_Node *node,_Bool has_read_digits)
{
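add_comment_nodes wires both comment styles into the generated automaton as KW_COMMENT, looping inside the comment body and finishing on */ or on the end-of-line character class (CHAR_FORM_FEED_TAB). A hand-written equivalent of that state machine, as a self-contained scanner (illustrative only; the state names in the comments are mine, and end of line is taken to be '\n'):

    #include <stddef.h>

    /* Returns the length in bytes of the comment starting at s (s must point at '/'),
       or 0 if s does not start a comment. */
    static size_t scan_comment(const char *s, size_t limit)
    {
        if (limit < 2 || s[0] != '/')
            return 0;
        if (s[1] == '*') {                           /* base -> star: block comment          */
            size_t i = 2;
            while (i + 1 < limit) {
                if (s[i] == '*' && s[i + 1] == '/')  /* possible finish -> finish on "*" "/" */
                    return i + 2;
                ++i;                                 /* any other byte stays in the comment  */
            }
            return 0;                                /* unterminated block comment           */
        }
        if (s[1] == '/') {                           /* base -> double_slash: line comment   */
            size_t i = 2;
            while (i < limit && s[i] != '\n')        /* loop until end of line               */
                ++i;
            return i;                                /* double_slash_end                     */
        }
        return 0;
    }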
F diff --git a/src/syntax/automatas/generator/generator.h b/src/syntax/automatas/generator/generator.h --- a/src/syntax/automatas/generator/generator.h +++ b/src/syntax/automatas/generator/generator.h
void add_number_nodes(struct Generator_Node *node);
+ void add_comment_nodes(struct Generator_Node *node);
+ struct Generator_Node* get_delta_of(struct Generator_Node *node,enum Source_Chars ch);
+
/*these return the last generated node so we can add L and LL to them*/
struct Generator_Node* add_decimal_number_nodes(struct Generator_Node *node);
struct Generator_Node* add_hexadecimal_number_nodes(struct Generator_Node *node);
F diff --git a/src/syntax/automatas/generator/keyword_list.c b/src/syntax/automatas/generator/keyword_list.c --- a/src/syntax/automatas/generator/keyword_list.c +++ b/src/syntax/automatas/generator/keyword_list.c
},
{
.keyword="else",
- .preprocessing_kw_string="KW_ELSE",
+ .preprocessing_kw_string="PKW_ELSE",
.kw_string="KW_ELSE",
.action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
.data_string="NULL"
},
{
.keyword="if",
- .preprocessing_kw_string="KW_IF",
+ .preprocessing_kw_string="PKW_IF",
.kw_string="KW_IF",
.action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
.data_string="NULL"
.action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
.data_string="&defined_special_identifier"
},
+ {
+ .keyword="ifdef",
+ .preprocessing_kw_string="PKW_IFDEF",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="elif",
+ .preprocessing_kw_string="PKW_ELIF",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="endif",
+ .preprocessing_kw_string="PKW_ENDIF",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="include",
+ .preprocessing_kw_string="PKW_INCLUDE",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="define",
+ .preprocessing_kw_string="PKW_DEFINE",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="undef",
+ .preprocessing_kw_string="PKW_UNDEF",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="line",
+ .preprocessing_kw_string="PKW_LINE",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="error",
+ .preprocessing_kw_string="PKW_ERROR",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="pragma",
+ .preprocessing_kw_string="PKW_PRAGMA",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__FILE__",
+ .preprocessing_kw_string="PKW_FILE_MACRO",
+ .kw_string="PKW_FILE_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__DATE__",
+ .preprocessing_kw_string="PKW_DATE_MACRO",
+ .kw_string="PKW_DATE_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__LINE__",
+ .preprocessing_kw_string="PKW_LINE_MACRO",
+ .kw_string="PKW_LINE_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__STDC__",
+ .preprocessing_kw_string="PKW_STDC_MACRO",
+ .kw_string="PKW_STDC_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__STDC_HOSTED__",
+ .preprocessing_kw_string="PKW_STDC_HOSTED_MACRO",
+ .kw_string="PKW_STDC_HOSTED_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__STDC_VERSION__",
+ .preprocessing_kw_string="PKW_STDC_VERSION_MACRO",
+ .kw_string="PKW_STDC_VERSION_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="__TIME__",
+ .preprocessing_kw_string="PKW_TIME_MACRO",
+ .kw_string="PKW_TIME_MACRO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
};
size_t number_of_chonky_keywords=sizeof(chonky_keywords)/sizeof(chonky_keywords[0]);
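The new keyword entries fall into two groups: directive names (ifdef, elif, endif, include, define, undef, line, error, pragma) get a PKW_* preprocessing keyword but keep kw_string KW_ID, so they act as keywords only after a #, while the predefined-macro spellings (__FILE__, __LINE__, and the rest) carry their PKW_*_MACRO value in both columns because they are recognised everywhere. The practical difference, in ordinary C:

    #define LIMIT 10        /* 'define' is a directive keyword on this line...             */

    int define = LIMIT;     /* ...but an ordinary identifier (KW_ID) here; this is valid C */
    int line   = __LINE__;  /* __LINE__ expands anywhere, not just inside directives       */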
F diff --git a/src/syntax/macro.c b/src/syntax/macro.c new file mode 100644 --- /dev/null +++ b/src/syntax/macro.c
+ #ifndef WONKY_MACRO_C
+ #define WONKY_MACRO_C WONKY_MACRO_C
+ #include <macro.h>
+
+
+ #endif
F diff --git a/src/syntax/macro.h b/src/syntax/macro.h new file mode 100644 --- /dev/null +++ b/src/syntax/macro.h
+ #ifndef WONKY_MACRO_H
+ #define WONKY_MACRO_H WONKY_MACRO_H
+ #include <macro.hh>
+ #include <stddef.h>
+
+
+ struct normal_define_directive
+ {
+ struct identifier *id;
+ struct Queue *replacement_tokens;
+ struct Translation_Unit *the_last_place_this_macro_was_defined;
+ };
+ struct functionlike_define_directive
+ {
+ struct identifier *id;
+ struct Queue *arguments;
+ struct Queue *replacement_tokens;
+ struct Translation_Unit *the_last_place_this_macro_was_defined;
+ };
+ struct functionlike_define_directive_argument
+ {
+ struct token_functionlike_define_directive *belongs_to;
+ struct Queue_Node *first_in_argument_substitution_tokens;
+ size_t number_of_substitution_tokens;
+ };
+ #endif
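These structs separate the data of a #define from the token that introduces it: an object-like macro keeps its identifier and a queue of replacement tokens, a function-like macro additionally keeps its parameter list, and each functionlike_define_directive_argument points back into that replacement list. In source terms the two shapes correspond to:

    /* normal_define_directive: id = GREETING, replacement_tokens = { "hello" } */
    #define GREETING "hello"

    /* functionlike_define_directive: id = MAX, arguments = { a, b },
       replacement_tokens = the body tokens ((a) > (b) ? (a) : (b))             */
    #define MAX(a, b) ((a) > (b) ? (a) : (b))

    static const char *greeting = GREETING;   /* -> "hello"                      */
    static int biggest = MAX(2, 3);           /* -> ((2) > (3) ? (2) : (3)) == 3 */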
F diff --git a/src/syntax/macro.hh b/src/syntax/macro.hh new file mode 100644 --- /dev/null +++ b/src/syntax/macro.hh
+ #ifndef WONKY_MACRO_HH
+ #define WONKY_MACRO_HH WONKY_MACRO_HH
+
+ struct normal_define_directive;
+ struct functionlike_define_directive;
+ struct functionlike_define_directive_argument;
+
+ #endif
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c --- a/src/syntax/token/token.c +++ b/src/syntax/token/token.c
*cpy=*src;
return cpy;
}
-
-
struct token* get_id_token(struct identifier *id,struct Source_Location *location)
{
struct token_identifier *ret;
return (struct token*)ret;
}
+ struct token* get_constant_long_long_int_token(struct Source_Location *location,long long int number)
+ {
+ struct token_constant *ret;
+ ret=wonky_malloc(sizeof(struct token_constant));
+ ret->location=location;
+ ret->type=KW_CONSTANT;
+ ret->constant=get_long_long_int_constant(number);
+ return (struct token*)ret;
+ }
struct token* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size)
{
struct token_string *ret;
return ret;
}
+ struct token* get_token_from_two_adjacent_strings(struct token_string *first,struct token_string *second)
+ {
+ struct token_string *ret;
+ ret=wonky_malloc(sizeof(struct token_string));
+ ret->type=KW_STRING;
+ ret->location=first->location;
+ ret->constant=concatenate_string_literals(first->constant,second->constant);
+
+ return (struct token*)ret;
+ }
+ struct token* get_file_macro_token(struct Source_Location *token_location)
+ {
+ struct token *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->type=PKW_FILE_MACRO;
+ ret->location=token_location;
+
+ return ret;
+ }
+
+ static char *ascdate(const struct tm *timeptr)
+ {
+ static const char mon_name[12][3] = {
+ "Jan", "Feb", "Mar", "Apr", "May", "Jun",
+ "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
+ };
+ static char result[30];
+
+ if(timeptr==NULL || timeptr->tm_mday>=33 || timeptr->tm_mon>=12 || timeptr->tm_year>=9999)
+ {
+ wonky_assert(SHOULD_NOT_REACH_HERE);
+ return "Could not determine the date";
+ }
+
+ sprintf(result, "%.3s %s%d %d",
+ mon_name[timeptr->tm_mon],
+ (timeptr->tm_mday<10?" ":""),
+ timeptr->tm_mday,
+ 1900 + timeptr->tm_year);
+ return result;
+ }
+ struct token* get_date_macro_token(struct Source_Location *token_location)
+ {
+ struct tm *tm;
+ time_t t;
+ char *ret;
+ size_t size;
+
+ t=time(NULL);
+
+ if((time_t)(-1)==t)
+ goto error;
+
+ tm=gmtime(&t);
+
+ if(tm==NULL)
+ goto error;
+
+
+ ret=ascdate(tm);
+ size=gstrnlen(ret,27);
+ return (struct token*)get_string_token(KW_STRING,token_location,ret,size);
+
+ error:
+
+ return (struct token*)get_string_token(KW_STRING,token_location,"Date could not be determined",sizeof("Date could not be determined")-1);
+ }
+ struct token* get_line_macro_token(struct Source_Location *token_location)
+ {
+ struct token *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->type=PKW_LINE_MACRO;
+ ret->location=token_location;
+
+ return ret;
+ }
+ struct token* get_stdc_macro_token(struct Source_Location *token_location)
+ {
+ struct token *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->type=PKW_STDC_MACRO;
+ ret->location=token_location;
+
+ return ret;
+ }
+ struct token* get_stdc_hosted_macro_token(struct Source_Location *token_location)
+ {
+ struct token *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->type=PKW_STDC_HOSTED_MACRO;
+ ret->location=token_location;
+
+ return ret;
+ }
+ struct token* get_stdc_version_macro_token(struct Source_Location *token_location)
+ {
+ struct token *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->type=PKW_STDC_VERSION_MACRO;
+ ret->location=token_location;
+
+ return ret;
+ }
+ static char *asctime_for_macro(const struct tm *timeptr)
+ {
+ static char result[30];
+
+ if(timeptr==NULL || timeptr->tm_hour>24 || timeptr->tm_sec>60 || timeptr->tm_min>60)
+ {
+ wonky_assert(SHOULD_NOT_REACH_HERE);
+ return "Could not determine the time";
+ }
+
+ sprintf(result, "%.2d:%.2d:%.2d",
+ timeptr->tm_hour,
+ timeptr->tm_min,
+ timeptr->tm_sec
+ );
+ return result;
+ }
+ struct token* get_time_macro_token(struct Source_Location *token_location)
+ {
+ struct tm *tm;
+ time_t t;
+ char *ret;
+ size_t size;
+
+ t=time(NULL);
+
+ if((time_t)(-1)==t)
+ goto error;
+
+ tm=gmtime(&t);
+
+ if(tm==NULL)
+ goto error;
+
+
+ ret=asctime_for_macro(tm);
+ size=gstrnlen(ret,27);
+ return (struct token*)get_string_token(KW_STRING,token_location,ret,size);
+
+ error:
+
+ return (struct token*)get_string_token(KW_STRING,token_location,"Time could not be determined",sizeof("Time could not be determined")-1);
+ }
#endif
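ascdate and asctime_for_macro format the translation time into the shapes the standard prescribes for __DATE__ ("Mmm dd yyyy", with the day space-padded as in asctime output) and __TIME__ ("hh:mm:ss"); note that gmtime yields UTC, whereas these macros conventionally reflect local translation time. For comparison, what a conforming compiler substitutes:

    #include <stdio.h>

    int main(void)
    {
        printf("%s\n", __DATE__);   /* e.g. "Apr  1 2021": month name, space-padded day, year */
        printf("%s\n", __TIME__);   /* e.g. "09:05:07":    two digits each, colon separated   */
        return 0;
    }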
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h --- a/src/syntax/token/token.h +++ b/src/syntax/token/token.h
#define WONKY_TOKEN_H WONKY_TOKEN_H
#include <token.hh>
+ #include <time.h>
+
#include <automata.h>
#include <constant.h>
#include <wonky_malloc.h>
#include <wonky_assert.h>
#include <source_file.h>
+ #include <macro.h>
/*the tokens are a bit heavy*/
{
enum LEXER_TYPE type;
struct Source_Location *location;
- struct identifier *id;
- struct Queue *replacement_tokens;
- struct Translation_Unit *the_last_place_this_macro_was_defined;
- };
- struct functionlike_define_directive_argument
- {
- struct token_functionlike_define_directive *belongs_to;
- struct Queue_Node *first_in_argument_substitution_tokens;
- size_t number_of_substitution_tokens;
+ struct normal_define_directive *define;
};
struct token_functionlike_define_directive
{
enum LEXER_TYPE type;
struct Source_Location *location;
- struct identifier *id;
- struct Queue *arguments;
- struct Queue *replacement_tokens;
- struct Translation_Unit *the_last_place_this_macro_was_defined;
+ struct functionlike_define_directive *define;
};
struct token_undef_directive
{
const char *filename;
};
*/
+
+
+
struct token* copy_token(struct token *src);
void handle_splicing(struct token *word);
char compare_tokens(struct token *a,struct token *b);
-
struct token* get_id_token(struct identifier *id,struct Source_Location *location);
struct token* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *location);
struct token* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *location);
struct token* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size);
+ struct token* get_constant_long_long_int_token(struct Source_Location *location,long long int number);
struct token* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size);
struct token* get_include_directive_token(struct Source_Location *location,struct Queue *tokens);
struct token* get_if_directive_token(struct Source_Location *location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
struct token* get_hashtag_hashtag_unary_operator(struct Source_Location *location,struct Queue *operands);
struct token* get_error_token(const char *msg,struct Source_Location *location,struct Program *program,...);
struct token* get_eof_token();
+ struct token* get_token_from_two_adjacent_strings(struct token_string *first,struct token_string *second);
+ struct token* get_file_macro_token(struct Source_Location *token_location);
+ struct token* get_date_macro_token(struct Source_Location *token_location);
+ struct token* get_line_macro_token(struct Source_Location *token_location);
+ struct token* get_stdc_macro_token(struct Source_Location *token_location);
+ struct token* get_stdc_hosted_macro_token(struct Source_Location *token_location);
+ struct token* get_stdc_version_macro_token(struct Source_Location *token_location);
+ struct token* get_time_macro_token(struct Source_Location *token_location);
#endif
F diff --git a/src/syntax/token/token.hh b/src/syntax/token/token.hh --- a/src/syntax/token/token.hh +++ b/src/syntax/token/token.hh
struct token_ifdef_directive;
struct token_ifndef_directive;
struct token_normal_define_directive;
- struct functionlike_define_directive_argument;
struct token_functionlike_define_directive;
struct token_undef_directive;
struct token_line_directive;