WONKY



LOG | FILES | OVERVIEW


F diff --git a/build/cmake/generator.txt b/build/cmake/generator.txt --- a/build/cmake/generator.txt +++ b/build/cmake/generator.txt
set(GENERATOR_SOURCES
- src/frontend/lex/automatas/generator/generator.c
- src/frontend/lex/automatas/generator/keyword_list.c
+ src/syntax/automatas/generator/generator.c
+ src/syntax/automatas/generator/keyword_list.c
)
add_executable(generator ${GENERATOR_SOURCES})
F diff --git a/build/cmake/include_directories.txt b/build/cmake/include_directories.txt --- a/build/cmake/include_directories.txt +++ b/build/cmake/include_directories.txt
include_directories(src/)
- include_directories(src/backend/js)
- include_directories(src/backend/text/print)
- include_directories(src/backend/text)
+ include_directories(src/backend)
include_directories(src/backend/asm)
include_directories(src/backend/asm/intel)
- include_directories(src/backend)
+ include_directories(src/backend/js)
+ include_directories(src/backend/text)
+ include_directories(src/backend/text/print)
+ include_directories(src/debug)
+ include_directories(src/debug/wobler)
+ include_directories(src/environment)
+ include_directories(src/environment/command_arguments)
+ include_directories(src/environment/error)
include_directories(src/frontend)
include_directories(src/frontend/lex)
- include_directories(src/frontend/lex/automatas)
- include_directories(src/frontend/lex/automatas/generator)
include_directories(src/frontend/parse)
+ include_directories(src/misc)
include_directories(src/semantics)
+ include_directories(src/semantics/constraints)
include_directories(src/semantics/identifiers)
include_directories(src/semantics/memory)
- include_directories(src/semantics/value)
- include_directories(src/semantics/constraints)
include_directories(src/semantics/program)
- include_directories(src/debug)
- include_directories(src/debug/wobler)
- include_directories(src/misc)
- include_directories(src/environment)
- include_directories(src/environment/command_arguments)
- include_directories(src/environment/error)
-
+ include_directories(src/semantics/value)
+ include_directories(src/syntax)
+ include_directories(src/syntax/automatas)
+ include_directories(src/syntax/automatas/generator)
+ include_directories(src/syntax/identifier)
+ include_directories(src/syntax/token)
F diff --git a/build/cmake/libs/automata_inner.txt b/build/cmake/libs/automata_inner.txt --- a/build/cmake/libs/automata_inner.txt +++ b/build/cmake/libs/automata_inner.txt
add_library(automata_inner STATIC
- src/frontend/lex/automatas/automata.c
+ src/syntax/automatas/automata.c
)
F diff --git a/build/cmake/libs/innards.txt b/build/cmake/libs/innards.txt --- a/build/cmake/libs/innards.txt +++ b/build/cmake/libs/innards.txt
src/semantics/identifiers/denoted.c
src/semantics/identifiers/linkage.c
src/semantics/identifiers/scope.c
- src/semantics/memory/location.c
+ src/semantics/memory/memory_location.c
src/semantics/memory/object.c
src/semantics/program/program.c
src/semantics/value/constant.c
src/semantics/value/initialiser.c
src/semantics/value/type.c
src/semantics/value/value.c
+ src/syntax/identifier/identifier.c
+ src/syntax/source_file.c
+ src/syntax/token/token.c
+ src/syntax/automatas/automata.c
)
F diff --git a/lkfjas b/lkfjas new file mode 100644 --- /dev/null +++ b/lkfjas
+ .
+ ├── build
+ │   └── cmake
+ │       ├── generator.txt
+ │       ├── include_directories.txt
+ │       ├── libs
+ │       │   ├── automata_inner.txt
+ │       │   ├── chonky.txt
+ │       │   ├── innards.txt
+ │       │   ├── misc.txt
+ │       │   ├── wobler_assert.txt
+ │       │   └── wonky_assert.txt
+ │       ├── libs.txt
+ │       ├── prebuild.txt
+ │       ├── wobler.txt
+ │       └── wonky.txt
+ ├── CMakeLists.txt
+ ├── doc
+ │   ├── arch.txt
+ │   ├── build.txt
+ │   ├── hier.txt
+ │   ├── scribblings
+ │   │   └── preprocessor_rewrite.txt
+ │   ├── tests.txt
+ │   └── todo.txt
+ ├── GPATH
+ ├── GRTAGS
+ ├── GTAGS
+ ├── README.txt
+ ├── src
+ │   ├── backend
+ │   │   ├── asm
+ │   │   │   └── intel
+ │   │   │       ├── intel_asm.c
+ │   │   │       ├── intel_asm.h
+ │   │   │       ├── intel_asm.hh
+ │   │   │       ├── intel_compile.c
+ │   │   │       ├── intel_compile.h
+ │   │   │       ├── intel_instruction.c
+ │   │   │       ├── intel_instruction.h
+ │   │   │       ├── intel_instruction.hh
+ │   │   │       ├── intel_location.c
+ │   │   │       ├── intel_location.h
+ │   │   │       └── intel_location.hh
+ │   │   ├── compile.c
+ │   │   ├── compile.h
+ │   │   ├── compile.hh
+ │   │   └── text
+ │   │       ├── lines.c
+ │   │       ├── lines.h
+ │   │       ├── lines.hh
+ │   │       └── print
+ │   │           ├── print.c
+ │   │           ├── print.h
+ │   │           └── print.hh
+ │   ├── common.h
+ │   ├── debug
+ │   │   ├── debug_ast.c
+ │   │   ├── debug_ast.h
+ │   │   ├── debug_denoted.c
+ │   │   ├── debug_denoted.h
+ │   │   ├── debug.h
+ │   │   ├── debug_initialiser.c
+ │   │   ├── debug_initialiser.h
+ │   │   ├── debug_lexer.c
+ │   │   ├── debug_lexer.h
+ │   │   ├── debug_linkage.c
+ │   │   ├── debug_linkage.h
+ │   │   ├── debug_scope.c
+ │   │   ├── debug_scope.h
+ │   │   ├── debug_type.c
+ │   │   ├── debug_type.h
+ │   │   ├── debug_value.c
+ │   │   ├── debug_value.h
+ │   │   ├── wobler
+ │   │   │   ├── wobler_assert.c
+ │   │   │   ├── wobler.c
+ │   │   │   ├── wobler_declarations.h
+ │   │   │   ├── wobler.h
+ │   │   │   └── wobler_tests.h
+ │   │   ├── wonky_assert.c
+ │   │   └── wonky_assert.h
+ │   ├── environment
+ │   │   ├── command_arguments
+ │   │   │   ├── gcc_arguments.c
+ │   │   │   ├── gcc_arguments.h
+ │   │   │   └── gcc_arguments.hh
+ │   │   └── error
+ │   │       ├── gcc_error.c
+ │   │       ├── gcc_error.h
+ │   │       └── gcc_error.hh
+ │   ├── frontend
+ │   │   ├── lex
+ │   │   │   ├── lexer.c
+ │   │   │   ├── lexer.h
+ │   │   │   ├── lexer.hh
+ │   │   │   ├── preprocessing.c
+ │   │   │   ├── preprocessing.h
+ │   │   │   └── preprocessing.hh
+ │   │   └── parse
+ │   │       ├── parse_declaration.c
+ │   │       ├── parse_declaration.h
+ │   │       ├── parse_expression.c
+ │   │       ├── parse_expression.h
+ │   │       ├── parse.h
+ │   │       ├── parse_statement.c
+ │   │       ├── parse_statement.h
+ │   │       ├── parse_statement.hh
+ │   │       ├── parse_translation_unit.c
+ │   │       └── parse_translation_unit.h
+ │   ├── misc
+ │   │   ├── gcc_string.c
+ │   │   ├── gcc_string.h
+ │   │   ├── map.c
+ │   │   ├── map.h
+ │   │   ├── map.hh
+ │   │   ├── queue.c
+ │   │   ├── queue.h
+ │   │   ├── queue.hh
+ │   │   ├── stack.c
+ │   │   ├── stack.h
+ │   │   ├── stack.hh
+ │   │   ├── wonky_malloc.c
+ │   │   ├── wonky_malloc.h
+ │   │   └── wonky_malloc.hh
+ │   ├── semantics
+ │   │   ├── ast.c
+ │   │   ├── ast.h
+ │   │   ├── ast.hh
+ │   │   ├── constraints
+ │   │   │   ├── constraints.h
+ │   │   │   ├── expression_constraints.c
+ │   │   │   ├── expression_constraints.h
+ │   │   │   ├── initialiser_constraints.c
+ │   │   │   ├── initialiser_constraints.h
+ │   │   │   ├── linkage_constraints.c
+ │   │   │   ├── linkage_constraints.h
+ │   │   │   ├── statement_constraints.c
+ │   │   │   └── statement_constraints.h
+ │   │   ├── identifiers
+ │   │   │   ├── denoted.c
+ │   │   │   ├── denoted.h
+ │   │   │   ├── denoted.hh
+ │   │   │   ├── linkage.c
+ │   │   │   ├── linkage.h
+ │   │   │   ├── linkage.hh
+ │   │   │   ├── scope.c
+ │   │   │   ├── scope.h
+ │   │   │   └── scope.hh
+ │   │   ├── memory
+ │   │   │   ├── memory_location.c
+ │   │   │   ├── memory_location.h
+ │   │   │   ├── memory_location.hh
+ │   │   │   ├── object.c
+ │   │   │   ├── object.h
+ │   │   │   └── object.hh
+ │   │   ├── program
+ │   │   │   ├── program.c
+ │   │   │   ├── program.h
+ │   │   │   ├── program.hh
+ │   │   │   ├── translation_unit.c
+ │   │   │   ├── translation_unit.h
+ │   │   │   └── translation_unit.hh
+ │   │   └── value
+ │   │       ├── constant.c
+ │   │       ├── constant.h
+ │   │       ├── constant.hh
+ │   │       ├── evaluation.c
+ │   │       ├── evaluation.h
+ │   │       ├── initialiser.c
+ │   │       ├── initialiser.h
+ │   │       ├── initialiser.hh
+ │   │       ├── type.c
+ │   │       ├── type.h
+ │   │       ├── type.hh
+ │   │       ├── value.c
+ │   │       ├── value.h
+ │   │       └── value.hh
+ │   ├── syntax
+ │   │   ├── automatas
+ │   │   │   ├── automata.c
+ │   │   │   ├── automata.h
+ │   │   │   ├── automata.hh
+ │   │   │   └── generator
+ │   │   │       ├── generator.c
+ │   │   │       ├── generator.h
+ │   │   │       ├── generator.hh
+ │   │   │       ├── keyword_list.c
+ │   │   │       ├── keyword_list.h
+ │   │   │       └── keyword_list.hh
+ │   │   ├── identifier
+ │   │   │   ├── identifier.c
+ │   │   │   ├── identifier.h
+ │   │   │   └── identifier.hh
+ │   │   ├── source_file.c
+ │   │   ├── source_file.h
+ │   │   ├── source_file.hh
+ │   │   └── token
+ │   │       ├── token.c
+ │   │       ├── token.h
+ │   │       └── token.hh
+ │   ├── wonky.c
+ │   └── wonky.h
+ ├── tests
+ │   ├── test3.c
+ │   ├── test5.c
+ │   ├── test_bitfield_error2.c
+ │   ├── test_bitfield_error3.c
+ │   ├── test_bitfield_error.c
+ │   ├── test_bitfields.c
+ │   ├── test_conditional_expression.c
+ │   ├── test_declaration2.c
+ │   ├── test_declaration.c
+ │   ├── test_declaration_error.c
+ │   ├── test_declaration_speed.c
+ │   ├── test_digraphs.c
+ │   ├── test_for_cycle_declaration.c
+ │   ├── test_function_definition.c
+ │   ├── test_function_definition_error2.c
+ │   ├── test_function_definition_error.c
+ │   ├── test_generic.c
+ │   ├── test_generic_error.c
+ │   ├── test_linkage2.c
+ │   ├── test_linkage.c
+ │   ├── test_linkage_error2.c
+ │   ├── test_linkage_error.c
+ │   ├── test_preproc_error.c
+ │   ├── test_typedef.c
+ │   ├── test_undeclared_error.c
+ │   ├── test_variadic_function.c
+ │   ├── test_variadic_function_error2.c
+ │   └── test_variadic_function_error.c
+ └── tools
+     └── wsh
+
+ 33 directories, 200 files
F diff --git a/src/frontend/lex/automatas/automata.c b/src/frontend/lex/automatas/automata.c deleted file mode 100644 --- a/src/frontend/lex/automatas/automata.c +++ /dev/null
- #ifndef WONKY_AUTOMATA_C
- #define WONKY_AUTOMATA_C WONKY_AUTOMATA_C
- #include <automata.h>
-
- enum Source_Chars compress[256]
- =
- {
- ['a']=CHAR_a,
- ['b']=CHAR_b,
- ['c']=CHAR_c,
- ['d']=CHAR_d,
- ['e']=CHAR_e,
- ['f']=CHAR_f,
- ['g']=CHAR_g,
- ['h']=CHAR_h,
- ['i']=CHAR_i,
- ['j']=CHAR_j,
- ['k']=CHAR_k,
- ['l']=CHAR_l,
- ['m']=CHAR_m,
- ['n']=CHAR_n,
- ['o']=CHAR_o,
- ['p']=CHAR_p,
- ['q']=CHAR_q,
- ['r']=CHAR_r,
- ['s']=CHAR_s,
- ['t']=CHAR_t,
- ['u']=CHAR_u,
- ['v']=CHAR_v,
- ['w']=CHAR_w,
- ['x']=CHAR_x,
- ['y']=CHAR_y,
- ['z']=CHAR_z,
- ['A']=CHAR_A,
- ['B']=CHAR_B,
- ['C']=CHAR_C,
- ['D']=CHAR_D,
- ['E']=CHAR_E,
- ['F']=CHAR_F,
- ['G']=CHAR_G,
- ['H']=CHAR_H,
- ['I']=CHAR_I,
- ['J']=CHAR_J,
- ['K']=CHAR_K,
- ['L']=CHAR_L,
- ['M']=CHAR_M,
- ['N']=CHAR_N,
- ['O']=CHAR_O,
- ['P']=CHAR_P,
- ['Q']=CHAR_Q,
- ['R']=CHAR_R,
- ['S']=CHAR_S,
- ['T']=CHAR_T,
- ['U']=CHAR_U,
- ['V']=CHAR_V,
- ['W']=CHAR_W,
- ['X']=CHAR_X,
- ['Y']=CHAR_Y,
- ['Z']=CHAR_Z,
- ['0']=CHAR_0,
- ['1']=CHAR_1,
- ['2']=CHAR_2,
- ['3']=CHAR_3,
- ['4']=CHAR_4,
- ['5']=CHAR_5,
- ['6']=CHAR_6,
- ['7']=CHAR_7,
- ['8']=CHAR_8,
- ['9']=CHAR_9,
- ['!']=CHAR_EXCLAMATION,
- ['"']=CHAR_DOUBLE_QUOTE,
- ['#']=CHAR_HASHTAG,
- ['%']=CHAR_PERCENT,
- ['&']=CHAR_AMPERSANT,
- ['\'']=CHAR_SINGLE_QUOTE,
- ['(']=CHAR_OPEN_NORMAL,
- [')']=CHAR_CLOSE_NORMAL,
- ['*']=CHAR_STAR,
- ['+']=CHAR_PLUS,
- [',']=CHAR_COMMA,
- ['-']=CHAR_MINUS,
- ['.']=CHAR_DOT,
- ['/']=CHAR_FORWARD_SLASH,
- [':']=CHAR_COLUMN,
- [';']=CHAR_SEMI_COLUMN,
- ['<']=CHAR_LESS,
- ['=']=CHAR_EQUAL,
- ['>']=CHAR_GREATER,
- ['?']=CHAR_QUESTION,
- ['[']=CHAR_OPEN_SQUARE,
- ['\\']=CHAR_BACKWARD_SLASH,
- ['^']=CHAR_HAT,
- ['_']=CHAR_UNDERSCORE,
- ['{']=CHAR_OPEN_CURLY,
- ['|']=CHAR_PIPE,
- ['}']=CHAR_CLOSE_CURLY,
- ['~']=CHAR_TILDE,
- [' ']=CHAR_SPACE,
- ['\t']=CHAR_HORISONTAL_TAB,
- ['\v']=CHAR_VERTICAL_TAB,
- ['\n']=CHAR_FORM_FEED_TAB,
- };
-
- enum Source_Chars get_ch(const char *str,size_t limit)
- {
- return compress[*str];
- }
-
-
-
- #endif
F diff --git a/src/frontend/lex/automatas/automata.h b/src/frontend/lex/automatas/automata.h deleted file mode 100644 --- a/src/frontend/lex/automatas/automata.h +++ /dev/null
- #ifndef WONKY_AUTOMATA_H
- #define WONKY_AUTOMATA_H WONKY_AUTOMATA_H
- #include <automata.hh>
-
- #include <stddef.h>
-
- extern enum Source_Chars compress[256];
- struct Automata_Node
- {
- enum Automata_Action action;
- enum LEXER_TYPE keyword;
- void *data;
-
- struct Automata_Node *delta[CHAR_ENUM_END];
-
- };
-
- enum Source_Chars get_ch(const char *str,size_t limit);
-
- extern struct Automata_Node chonky[];
- extern struct Automata_Node chonky_jr[];
-
- #endif
F diff --git a/src/frontend/lex/automatas/automata.hh b/src/frontend/lex/automatas/automata.hh deleted file mode 100644 --- a/src/frontend/lex/automatas/automata.hh +++ /dev/null
- #ifndef WONKY_AUTOMATA_HH
- #define WONKY_AUTOMATA_HH WONKY_AUTOMATA_HH
-
- enum Automata_Action
- {
- AUTOMATA_ACTION_DISPENSE_TOKEN,
- AUTOMATA_ACTION_SWITCH_AUTOMATA,
- AUTOMATA_ACTION_MACRO_EXPANSION,
- AUTOMATA_ACTION_NO_ACTION,
- };
-
- enum Source_Chars
- {
- CHAR_NONE=0,/*0 is used in initialisors so don't change this*/
- CHAR_A,
- CHAR_B,
- CHAR_C,
- CHAR_D,
- CHAR_E,
- CHAR_F,
- CHAR_G,
- CHAR_H,
- CHAR_I,
- CHAR_J,
- CHAR_K,
- CHAR_L,
- CHAR_M,
- CHAR_N,
- CHAR_O,
- CHAR_P,
- CHAR_Q,
- CHAR_R,
- CHAR_S,
- CHAR_T,
- CHAR_U,
- CHAR_V,
- CHAR_W,
- CHAR_X,
- CHAR_Y,
- CHAR_Z,
- CHAR_a,
- CHAR_b,
- CHAR_c,
- CHAR_d,
- CHAR_e,
- CHAR_f,
- CHAR_g,
- CHAR_h,
- CHAR_i,
- CHAR_j,
- CHAR_k,
- CHAR_l,
- CHAR_m,
- CHAR_n,
- CHAR_o,
- CHAR_p,
- CHAR_q,
- CHAR_r,
- CHAR_s,
- CHAR_t,
- CHAR_u,
- CHAR_v,
- CHAR_w,
- CHAR_x,
- CHAR_y,
- CHAR_z,
- CHAR_0,
- CHAR_1,
- CHAR_2,
- CHAR_3,
- CHAR_4,
- CHAR_5,
- CHAR_6,
- CHAR_7,
- CHAR_8,
- CHAR_9,
- CHAR_EXCLAMATION,
- CHAR_DOUBLE_QUOTE,
- CHAR_HASHTAG,
- CHAR_PERCENT,
- CHAR_AMPERSANT,
- CHAR_SINGLE_QUOTE,
- CHAR_OPEN_NORMAL,
- CHAR_CLOSE_NORMAL,
- CHAR_STAR,
- CHAR_PLUS,
- CHAR_COMMA,
- CHAR_MINUS,
- CHAR_DOT,
- CHAR_FORWARD_SLASH,
- CHAR_COLUMN,
- CHAR_SEMI_COLUMN,
- CHAR_LESS,
- CHAR_EQUAL,
- CHAR_GREATER,
- CHAR_QUESTION,
- CHAR_OPEN_SQUARE,
- CHAR_BACKWARD_SLASH,
- CHAR_CLOSE_SQUARE,
- CHAR_HAT,
- CHAR_UNDERSCORE,
- CHAR_OPEN_CURLY,
- CHAR_PIPE,
- CHAR_CLOSE_CURLY,
- CHAR_TILDE,
- CHAR_SPACE,
- CHAR_HORISONTAL_TAB,
- CHAR_VERTICAL_TAB,
- CHAR_FORM_FEED_TAB,
-
- CHAR_ENUM_END
- };
-
- enum LEXER_TYPE
- {
- KW_AUTO,
- KW_DO,
- KW_DOUBLE,
- KW_INT,
- KW_STRUCT,
- KW_BREAK,
- KW_ELSE,
- KW_DEFINED,
- KW_LONG,
- KW_SWITCH,
- KW_CASE,
- KW_ENUM,
- KW_REGISTER,
- KW_TYPEDEF,
- KW_CHAR,
- KW_EXTERN,
- KW_RETURN,
- KW_UNION,
- KW_CONST,
- KW_FLOAT,
- KW_SHORT,
- KW_UNSIGNED,
- KW_CONTINUE,
- KW_FOR,
- KW_SIGNED,
- KW_VOID,
- KW_DEFAULT,
- KW_GOTO,
- KW_SIZEOF,
- KW_VOLATILE,
- KW_IF,
- KW_STATIC,
- KW_WHILE,
- KW_EXCLAMATION,
- KW_PERCENT,
- KW_AND,
- KW_AND_AND,
- KW_OPEN_NORMAL,
- KW_CLOSE_NORMAL,
- KW_STAR,
- KW_PLUS,
- KW_COMMA,
- KW_MINUS,
- KW_DOT,
- KW_ARROW,
- KW_COLUMN,
- KW_SEMI_COLUMN,
- KW_LESS,
- KW_EQ,
- KW_EQEQ,
- KW_MORE,
- KW_QUESTION,
- KW_HAT,
- KW_PIPE,
- KW_PIPE_PIPE,
- KW_TILDE,
- KW_PLUSPLUS,
- KW_MINUSMINUS,
- KW_SHIFT_RIGHT,
- KW_SHIFT_LEFT,
- KW_LESS_EQ,
- KW_MORE_EQ,
- KW_NOT_EQ,
- KW_PLUS_EQ,
- KW_MINUS_EQ,
- KW_STAR_EQ,
- KW_PERCENT_EQ,
- KW_SHIFT_LEFT_EQ,
- KW_SHIFT_RIGHT_EQ,
- KW_AND_EQ,
- KW_HAT_EQ,
- KW_PIPE_EQ,
- KW_HASHTAG,
- KW_HASHTAG_HASHTAG,
- KW_ELIPSIS,
- KW_DIV,
- KW_INLINE,
- KW_RESTRICT,
- KW_BOOL,
- KW_COMPLEX,
- KW_IMAGINARY,
- KW_OPEN_SQUARE,
- KW_CLOSE_SQUARE,
- KW_CLOSE_CURLY,
- KW_OPEN_CURLY,
- KW_DIV_EQ,
- KW_FORWARD_SLASH,
- KW_NOTYPE,
- KW_HEXADECIMAL_CONSTANT,
- KW_DECIMAL_CONSTANT,
- KW_OCTAL_CONSTANT ,
- KW_UNSIGNED_DECIMAL_CONSTANT,
- KW_UNSIGNED_OCTAL_CONSTANT,
- KW_UNSIGNED_HEXADECIMAL_CONSTANT,
- KW_UNSIGNED_LONG_HEXADECIMAL_CONSTANT,
- KW_UNSIGNED_LONG_OCTAL_CONSTANT,
- KW_UNSIGNED_LONG_DECIMAL_CONSTANT,
- KW_UNSIGNED_LONG_LONG_DECIMAL_CONSTANT,
- KW_UNSIGNED_LONG_LONG_HEXADECIMAL_CONSTANT,
- KW_UNSIGNED_LONG_LONG_OCTAL_CONSTANT,
- KW_LONG_HEXADECIMAL_CONSTANT,
- KW_LONG_OCTAL_CONSTANT,
- KW_LONG_DECIMAL_CONSTANT,
- KW_LONG_LONG_HEXADECIMAL_CONSTANT,
- KW_LONG_LONG_OCTAL_CONSTANT,
- KW_LONG_LONG_DECIMAL_CONSTANT,
- KW_DOUBLE_DECIMAL_CONSTANT,
- KW_LONG_DOUBLE_DECIMAL_CONSTANT,
- KW_FLOAT_DECIMAL_CONSTANT,
- KW_DOUBLE_HEXADECIMAL_CONSTANT,
- KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT,
- KW_FLOAT_HEXADECIMAL_CONSTANT,
- KW_COMMENT,
- KW_ID,
- KW_CHAR_CONSTANT,
- KW_WIDE_CHAR_CONSTANT,
- KW_STRING,
- KW_WIDE_STRING,
- PKW_IF,
- PKW_IFDEF,
- PKW_IFNDEF,
- PKW_ELIF,
- PKW_ELSE,
- PKW_ENDIF,
- PKW_INCLUDE,
- PKW_FUNCTIONLIKE_DEFINE,
- PKW_DEFINE,
- PKW_UNDEF,
- PKW_LINE,
- PKW_ERROR,
- PKW_PRAGMA,
- PKW_COMMENT,
- PKW_NOTYPE,
- LT_EOF,
-
- LEXER_TYPE_END
- };
-
- struct Automata_Node;
-
- #endif
F diff --git a/src/frontend/lex/automatas/generator/common_keywords.initialiser b/src/frontend/lex/automatas/generator/common_keywords.initialiser deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/common_keywords.initialiser +++ /dev/null
F diff --git a/src/frontend/lex/automatas/generator/generator.c b/src/frontend/lex/automatas/generator/generator.c deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/generator.c +++ /dev/null
- #ifndef WONKY_AUTOMATA_GENERATOR_C
- #define WONKY_AUTOMATA_GENERATOR_C WONKY_AUTOMATA_GENERATOR_C
- #include <generator.h>
-
-
- int main()
- {
- wonky_memory_init();
-
- print_automatas();
-
- wonky_memory_delete();
-
- return 0;
- }
-
- void print_automatas()
- {
- FILE *chonky_c;
-
- chonky_c=fopen("chonky.c","w");
- if(chonky_c==NULL)
- {
- fprintf(stderr,"Could not write to chonky.c\nAborting!\n");
- exit(1);
- }
-
- fprintf(chonky_c,"#ifndef WONKY_CHONKY_C\n#define WONKY_CHONKY_C WONKY_CHONKY_C\n#include <stddef.h>\n#include <automata.h>\n\n");
- print_automata(make_chonky(),"chonky",chonky_c);
- fprintf(chonky_c,"\n#endif");
- }
- void print_automata(struct Generator_Node *root,const char *name,FILE *out)
- {
- fprintf(out,"struct Automata_Node %s[]=\n{\n",name);
- print_automata_nodes(root,name,out);
- fprintf(out,"\n};",name);
- }
- void print_automata_nodes(struct Generator_Node *node,const char *name,FILE *out)
- {
- int i;
- ssize_t node_number=0;
- struct Queue *node_queue;
- struct Generator_Node *current_node;
- struct Generator_Node *adjacent_node;
-
- node_queue=wonky_malloc(sizeof(struct Queue));
- Queue_Init(node_queue);
-
- Queue_Push(node_queue,node);
- node->node_number=0;
-
- while(node_queue->size>0)
- {
- current_node=Queue_Pop(node_queue);
- fprintf(out,"{ \n.action=%s,\n.keyword=%s,\n.data=%s,\n.delta={",current_node->action_string,current_node->kw_string,current_node->data_string);
- for(i=0;i<CHAR_ENUM_END;++i)
- if(current_node->output.delta[i]!=NULL)
- {
- adjacent_node=(struct Generator_Node*)current_node->output.delta[i];
- if(adjacent_node->node_number==NODE_NOT_NUMBERED)
- {
- adjacent_node->node_number=++node_number;
- Queue_Push(node_queue,adjacent_node);
- }
- fprintf(out,"%s+%zd ,",name,adjacent_node->node_number);
- }else
- {
- fprintf(out,"NULL, ");
- }
-
- fprintf(out,"}\n},\n");
- }
-
-
- wonky_assert(node_queue->size==0);
- wonky_free(node_queue);
- }
- struct Generator_Node* make_chonky()
- {
- struct Generator_Node *ret;
- ret=make_generator(chonky_keywords,number_of_chonky_keywords);
-
- add_finishing_float_nodes(ret,0);
- add_number_nodes(ret);
- add_string_char_nodes(ret);
- add_id_nodes(ret);
-
- return ret;
- }
- struct Generator_Node* make_generator(const struct Keyword_Entry keywords[],size_t number_of_keywords)
- {
- size_t i;
- struct Generator_Node *ret;
-
- ret=get_generator_node(null_str,no_type_str,automata_no_action_str);
-
- for(i=0;i<number_of_keywords;++i)
- insert_keyword(ret,keywords+i);
-
- return ret;
-
- }
- struct Generator_Node* insert_keyword(struct Generator_Node *node,const struct Keyword_Entry *entry)
- {
- size_t where_in_keyword;
- struct Generator_Node *current;
-
- for(where_in_keyword=0,current=node;
- entry->keyword[where_in_keyword]!='\0' && entry->keyword[where_in_keyword+1]!='\0';
- current=(struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)],++where_in_keyword)
- {
- if(current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]==NULL)
- current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]=(struct Automata_Node*)get_generator_node(null_str,no_type_str,automata_no_action_str);
- }
-
- if(current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]==NULL)
- current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]=(struct Automata_Node*)get_generator_node(entry->data_string,entry->kw_string,entry->action_string);
- else
- {
- ((struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)])->kw_string=entry->kw_string;
- ((struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)])->data_string=entry->data_string;
- ((struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)])->action_string=entry->action_string;
- }
-
- return (struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)];
- }
- struct Generator_Node* get_generator_node(const char *data_string,const char *kw_string,const char *action_string)
- {
- struct Generator_Node *ret;
-
- ret=wonky_calloc(1,sizeof(struct Generator_Node));
- ret->node_number=NODE_NOT_NUMBERED;
- ret->data_string=data_string;
- ret->kw_string=kw_string;
- ret->action_string=action_string;
-
- return ret;
- }
- /*requires that there are no loops in the automata*/
- void add_id_nodes(struct Generator_Node *node)
- {
- struct Generator_Node *id_node;
- struct Queue *node_queue;
- struct Generator_Node *current_node;
- _Bool push_nodes;
-
- id_node=get_generator_node(null_str,id_type_str,automata_dispense_token_str);
-
- node_queue=wonky_malloc(sizeof(struct Queue));
- Queue_Init(node_queue);
-
- Queue_Push(node_queue,id_node);
-
- connect_node(node,id_node,node_queue,CHAR_a,CHAR_z,1);
- connect_node(node,id_node,node_queue,CHAR_A,CHAR_Z,1);
- connect_node(node,id_node,node_queue,CHAR_UNDERSCORE,CHAR_UNDERSCORE,1);
-
- while(node_queue->size>0)
- {
- current_node=Queue_Pop(node_queue);
-
- if(current_node->kw_string==no_type_str)
- {
- current_node->kw_string=id_type_str;
- current_node->action_string=automata_dispense_token_str;
- push_nodes=1;
-
- }else
- {
- push_nodes=0;
- }
- connect_node(current_node,id_node,node_queue,CHAR_a,CHAR_z,push_nodes);
- connect_node(current_node,id_node,node_queue,CHAR_A,CHAR_Z,push_nodes);
- connect_node(current_node,id_node,node_queue,CHAR_0,CHAR_9,push_nodes);
- connect_node(current_node,id_node,node_queue,CHAR_UNDERSCORE,CHAR_UNDERSCORE,push_nodes);
-
- }
-
- wonky_assert(node_queue->size==0);
- wonky_free(node_queue);
-
- }
- void add_number_nodes(struct Generator_Node *node)
- {
- struct Generator_Node *oct_hex_split;
-
- add_integer_suffix(add_decimal_number_nodes(node),"KW_LONG_DECIMAL_CONSTANT","KW_LONG_LONG_DECIMAL_CONSTANT");
-
- oct_hex_split=get_generator_node(null_str,"KW_OCTAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,oct_hex_split,NULL,CHAR_0,CHAR_0,0);
- add_integer_suffix(oct_hex_split,"KW_LONG_OCTAL_CONSTANT","KW_LONG_LONG_OCTAL_CONSTANT");
-
- add_integer_suffix(add_hexadecimal_number_nodes(oct_hex_split),"KW_LONG_HEXADECIMAL_CONSTANT","KW_LONG_LONG_HEXADECIMAL_CONSTANT");
- add_integer_suffix(add_octal_number_nodes(oct_hex_split),"KW_LONG_OCTAL_CONSTANT","KW_LONG_LONG_OCTAL_CONSTANT");
- }
- struct Generator_Node* add_decimal_number_nodes(struct Generator_Node *node)
- {
- struct Generator_Node *decimal_node;
- decimal_node=get_generator_node(null_str,"KW_DECIMAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,decimal_node,NULL,CHAR_1,CHAR_9,0);
- connect_node(decimal_node,decimal_node,NULL,CHAR_0,CHAR_9,0);
- add_finishing_float_nodes(decimal_node,1);
- return decimal_node;
- }
- /*the leading 0 has been taken from add_number_nodes*/
- struct Generator_Node* add_hexadecimal_number_nodes(struct Generator_Node *node)
- {
- struct Generator_Node *hexadecimal_node_start;
- struct Generator_Node *hexadecimal_node;
-
- hexadecimal_node_start=get_generator_node(null_str,no_type_str,automata_no_action_str);
-
- connect_node(node,hexadecimal_node_start,NULL,CHAR_x,CHAR_x,0);
- connect_node(node,hexadecimal_node_start,NULL,CHAR_X,CHAR_X,0);
-
- hexadecimal_node=get_generator_node(null_str,"KW_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
-
- connect_node(hexadecimal_node_start,hexadecimal_node,NULL,CHAR_0,CHAR_9,0);
- connect_node(hexadecimal_node_start,hexadecimal_node,NULL,CHAR_a,CHAR_f,0);
- connect_node(hexadecimal_node_start,hexadecimal_node,NULL,CHAR_A,CHAR_F,0);
-
- connect_node(hexadecimal_node,hexadecimal_node,NULL,CHAR_0,CHAR_9,0);
- connect_node(hexadecimal_node,hexadecimal_node,NULL,CHAR_a,CHAR_f,0);
- connect_node(hexadecimal_node,hexadecimal_node,NULL,CHAR_A,CHAR_F,0);
-
-
- add_finishing_hexadecimal_float_nodes(hexadecimal_node,1);
- return hexadecimal_node;
- }
- struct Generator_Node* add_octal_number_nodes(struct Generator_Node *node)
- {
- struct Generator_Node *octal_node;
-
- add_finishing_float_nodes(node,1);
-
- octal_node=get_generator_node(null_str,"KW_OCTAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,octal_node,NULL,CHAR_0,CHAR_7,0);
- connect_node(octal_node,octal_node,NULL,CHAR_0,CHAR_7,0);
-
- add_finishing_float_nodes(octal_node,1);
- return octal_node;
- }
- void add_integer_suffix(struct Generator_Node *tail,const char *l,const char *ll)
- {
- struct Generator_Node *long_node;
- struct Generator_Node *long_long_node;
-
- long_node=get_generator_node(null_str,l,automata_dispense_token_str);
- long_long_node=get_generator_node(null_str,ll,automata_dispense_token_str);
-
- connect_node(tail,long_node,NULL,CHAR_l,CHAR_l,0);
- connect_node(tail,long_node,NULL,CHAR_L,CHAR_L,0);
-
- connect_node(long_node,long_long_node,NULL,CHAR_l,CHAR_l,0);
- connect_node(long_node,long_long_node,NULL,CHAR_L,CHAR_L,0);
- }
- void add_string_char_nodes(struct Generator_Node *node)
- {
- struct Generator_Node *prefixed_string_node;
-
- prefixed_string_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- connect_node(node,prefixed_string_node,NULL,CHAR_L,CHAR_L,0);
-
- add_string_char_nodes_inner(prefixed_string_node,"KW_WIDE_STRING","KW_WIDE_CHAR_CONSTANT");
- add_string_char_nodes_inner(node,"KW_STRING","KW_CHAR_CONSTANT");
-
- }
- void add_string_char_nodes_inner(struct Generator_Node *node,const char *str_kw,const char *char_kw)
- {
- struct Generator_Node *inner_string_node;
- struct Generator_Node *ending_string_node;
-
- struct Generator_Node *inner_char_node;
- struct Generator_Node *ending_char_node;
-
- inner_string_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- ending_string_node=get_generator_node(null_str,str_kw,automata_dispense_token_str);
-
- inner_char_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- ending_char_node=get_generator_node(null_str,char_kw,automata_dispense_token_str);
-
-
- connect_node(node,inner_char_node,NULL,CHAR_SINGLE_QUOTE,CHAR_SINGLE_QUOTE,0);
- connect_node(inner_char_node,ending_char_node,NULL,CHAR_SINGLE_QUOTE,CHAR_SINGLE_QUOTE,0);
- connect_node(inner_char_node,inner_char_node,NULL,0,CHAR_ENUM_END,0);
-
- connect_node(node,inner_string_node,NULL,CHAR_DOUBLE_QUOTE,CHAR_DOUBLE_QUOTE,0);
- connect_node(inner_string_node,ending_string_node,NULL,CHAR_DOUBLE_QUOTE,CHAR_DOUBLE_QUOTE,0);
- connect_node(inner_string_node,inner_string_node,NULL,0,CHAR_ENUM_END,0);
- }
- void add_finishing_float_nodes(struct Generator_Node *node,_Bool has_read_digits)
- {
- struct Generator_Node *hold;
- struct Generator_Node *hold2;
- hold=add_fractional_constant(node,has_read_digits);
- hold2=add_exponent_part(hold);
- add_float_suffix(hold,"KW_FLOAT_DECIMAL_CONSTANT","KW_LONG_DOUBLE_DECIMAL_CONSTANT");
- add_float_suffix(hold2,"KW_FLOAT_DECIMAL_CONSTANT","KW_LONG_DOUBLE_DECIMAL_CONSTANT");
-
- }
- void add_finishing_hexadecimal_float_nodes(struct Generator_Node *node,_Bool has_read_digits)
- {
- struct Generator_Node *hold;
- struct Generator_Node *hold2;
-
- hold=add_hexadecimal_fractional_constant(node,has_read_digits);
- hold2=add_hexadecimal_exponent_part(hold);
- add_float_suffix(hold,"KW_FLOAT_HEXADECIMAL_CONSTANT","KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT");
- add_float_suffix(hold2,"KW_FLOAT_HEXADECIMAL_CONSTANT","KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT");
- }
- void add_float_suffix(struct Generator_Node *node,const char *f,const char *l)
- {
- struct Generator_Node *float_node;
- struct Generator_Node *long_double_node;
-
- float_node=get_generator_node(null_str,f,automata_dispense_token_str);
- long_double_node=get_generator_node(null_str,l,automata_dispense_token_str);
-
- connect_node(node,float_node,NULL,CHAR_f,CHAR_f,0);
- connect_node(node,float_node,NULL,CHAR_F,CHAR_F,0);
-
- connect_node(node,long_double_node,NULL,CHAR_l,CHAR_l,0);
- connect_node(node,long_double_node,NULL,CHAR_L,CHAR_L,0);
- }
- struct Generator_Node* add_fractional_constant(struct Generator_Node *node,_Bool has_read_digits)
- {
- struct Generator_Node *dot_node;
- struct Generator_Node *digit_node;
-
- if(has_read_digits)
- {
- dot_node=get_generator_node(null_str,"KW_DOUBLE_DECIMAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
- connect_node(dot_node,dot_node,NULL,CHAR_0,CHAR_9,0);
-
- return dot_node;
- }else
- {
- dot_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- digit_node=get_generator_node(null_str,"KW_DOUBLE_DECIMAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
- connect_node(dot_node,digit_node,NULL,CHAR_0,CHAR_9,0);
- connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
- return digit_node;
- }
-
-
- }
- struct Generator_Node* add_hexadecimal_fractional_constant(struct Generator_Node *node,_Bool has_read_digits)
- {
- struct Generator_Node *dot_node;
- struct Generator_Node *digit_node;
-
- if(has_read_digits)
- {
- dot_node=get_generator_node(null_str,"KW_DOUBLE_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
- connect_node(dot_node,dot_node,NULL,CHAR_0,CHAR_9,0);
- connect_node(dot_node,dot_node,NULL,CHAR_a,CHAR_f,0);
- connect_node(dot_node,dot_node,NULL,CHAR_A,CHAR_F,0);
-
- return dot_node;
- }else
- {
- dot_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- digit_node=get_generator_node(null_str,"KW_DOUBLE_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
- connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
- connect_node(dot_node,digit_node,NULL,CHAR_0,CHAR_9,0);
- connect_node(dot_node,digit_node,NULL,CHAR_a,CHAR_f,0);
- connect_node(dot_node,digit_node,NULL,CHAR_A,CHAR_F,0);
-
- connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
- connect_node(digit_node,digit_node,NULL,CHAR_a,CHAR_f,0);
- connect_node(digit_node,digit_node,NULL,CHAR_A,CHAR_F,0);
- return digit_node;
- }
-
-
- }
- struct Generator_Node* add_exponent_part(struct Generator_Node *node)
- {
- struct Generator_Node *digit_node;
- struct Generator_Node *e_node;
-
- e_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- digit_node=get_generator_node(null_str,"KW_DOUBLE_DECIMAL_CONSTANT",automata_dispense_token_str);
-
- connect_node(node,e_node,NULL,CHAR_e,CHAR_e,0);
- connect_node(node,e_node,NULL,CHAR_E,CHAR_E,0);
-
- connect_node(e_node,digit_node,NULL,CHAR_PLUS,CHAR_PLUS,0);
- connect_node(e_node,digit_node,NULL,CHAR_MINUS,CHAR_MINUS,0);
- connect_node(e_node,digit_node,NULL,CHAR_0,CHAR_9,0);
-
- connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
-
- return digit_node;
- }
- struct Generator_Node* add_hexadecimal_exponent_part(struct Generator_Node *node)
- {
- struct Generator_Node *digit_node;
- struct Generator_Node *p_node;
-
- p_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
- digit_node=get_generator_node(null_str,"KW_DOUBLE_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
-
- connect_node(node,p_node,NULL,CHAR_p,CHAR_p,0);
- connect_node(node,p_node,NULL,CHAR_P,CHAR_P,0);
-
- connect_node(p_node,digit_node,NULL,CHAR_PLUS,CHAR_PLUS,0);
- connect_node(p_node,digit_node,NULL,CHAR_MINUS,CHAR_MINUS,0);
- connect_node(p_node,digit_node,NULL,CHAR_0,CHAR_9,0);
-
- connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
-
- return digit_node;
- }
- void connect_node(struct Generator_Node *node,struct Generator_Node *target_node,struct Queue *node_queue,enum Source_Chars begin,enum Source_Chars end,_Bool push_nodes)
- {
- int i;
- for(i=begin;i<=end;++i)
- if(node->output.delta[i]==NULL)
- node->output.delta[i]=(struct Automata_Node*)target_node;
- else if(push_nodes)
- Queue_Push(node_queue,node->output.delta[i]);
-
- }
- #endif
F diff --git a/src/frontend/lex/automatas/generator/generator.h b/src/frontend/lex/automatas/generator/generator.h deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/generator.h +++ /dev/null
- #ifndef WONKY_AUTOMATA_GENERATOR_H
- #define WONKY_AUTOMATA_GENERATOR_H WONKY_AUTOMATA_GENERATOR_H
- #include <generator.hh>
-
- #include <stdio.h>
- #include <automata.h>
- #include <queue.h>
- #include <wonky_malloc.h>
- #include <wonky_assert.h>
- #include <keyword_list.h>
-
-
- struct Generator_Node
- {
- ssize_t node_number;
- const char *data_string;
- const char *kw_string;
- const char *action_string;
- struct Automata_Node output;
- };
-
-
- void print_automatas();
- void print_automata(struct Generator_Node *root,const char *name,FILE *out);
- void print_automata_nodes(struct Generator_Node *node,const char *name,FILE *out);
-
- struct Generator_Node* make_chonky();
- struct Generator_Node* make_generator(const struct Keyword_Entry *keywords,size_t number_of_keywords);
- struct Generator_Node* insert_keyword(struct Generator_Node *node,const struct Keyword_Entry *entry);
- struct Generator_Node* get_generator_node(const char *data_string,const char *kw_string,const char *action_string);
- void add_id_nodes(struct Generator_Node *node);
-
- void add_number_nodes(struct Generator_Node *node);
-
- /*these return the last generated node so we can add L and LL to them*/
- struct Generator_Node* add_decimal_number_nodes(struct Generator_Node *node);
- struct Generator_Node* add_hexadecimal_number_nodes(struct Generator_Node *node);
- struct Generator_Node* add_octal_number_nodes(struct Generator_Node *node);
-
- void add_integer_suffix(struct Generator_Node *tail,const char *l,const char *ll);
-
- void add_string_char_nodes(struct Generator_Node *node);
- void add_string_char_nodes_inner(struct Generator_Node *node,const char *str_kw,const char *char_kw);
-
- void add_finishing_float_nodes(struct Generator_Node *node,_Bool has_read_digits);
- void add_finishing_hexadecimal_float_nodes(struct Generator_Node *node,_Bool has_read_digits);
- void add_float_suffix(struct Generator_Node *node,const char *f,const char *l);
-
- struct Generator_Node* add_fractional_constant(struct Generator_Node *node,_Bool has_read_digits);
- struct Generator_Node* add_hexadecimal_fractional_constant(struct Generator_Node *node,_Bool has_read_digits);
-
- struct Generator_Node* add_exponent_part(struct Generator_Node *node);
- struct Generator_Node* add_hexadecimal_exponent_part(struct Generator_Node *node);
-
-
- void connect_node(struct Generator_Node *node,struct Generator_Node *target_node,struct Queue *node_queue,enum Source_Chars begin,enum Source_Chars end,_Bool push_nodes);
-
- int main();
- static const ssize_t NODE_NOT_NUMBERED=-1;
-
- static const char *null_str="NULL";
- static const char *no_type_str="KW_NOTYPE";
- static const char *id_type_str="KW_ID";
-
- static const char *automata_no_action_str="AUTOMATA_ACTION_NO_ACTION";
- static const char *automata_dispense_token_str="AUTOMATA_ACTION_DISPENSE_TOKEN";
-
- #endif
F diff --git a/src/frontend/lex/automatas/generator/generator.hh b/src/frontend/lex/automatas/generator/generator.hh deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/generator.hh +++ /dev/null
- #ifndef WONKY_AUTOMATA_GENERATOR_HH
- #define WONKY_AUTOMATA_GENERATOR_HH WONKY_AUTOMATA_GENERATOR_HH
-
-
- struct Generator_Node;
-
- #endif
F diff --git a/src/frontend/lex/automatas/generator/keyword_list.c b/src/frontend/lex/automatas/generator/keyword_list.c deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/keyword_list.c +++ /dev/null
- #ifndef WONKY_KEYWORD_LIST_C
- #define WONKY_KEYWORD_LIST_C WONKY_KEYWORD_LIST_C
- #include <keyword_list.h>
-
-
- struct Keyword_Entry chonky_keywords[]
- =
- {
- {
- .keyword="auto",
- .kw_string="KW_AUTO",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="do",
- .kw_string="KW_DO",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="double",
- .kw_string="KW_DOUBLE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="int",
- .kw_string="KW_INT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="struct",
- .kw_string="KW_STRUCT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="break",
- .kw_string="KW_BREAK",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="else",
- .kw_string="KW_ELSE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="long",
- .kw_string="KW_LONG",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="switch",
- .kw_string="KW_SWITCH",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="case",
- .kw_string="KW_CASE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="enum",
- .kw_string="KW_ENUM",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="register",
- .kw_string="KW_REGISTER",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="typedef",
- .kw_string="KW_TYPEDEF",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="char",
- .kw_string="KW_CHAR",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="extern",
- .kw_string="KW_EXTERN",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="return",
- .kw_string="KW_RETURN",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="union",
- .kw_string="KW_UNION",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="const",
- .kw_string="KW_CONST",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="float",
- .kw_string="KW_FLOAT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="short",
- .kw_string="KW_SHORT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="unsigned",
- .kw_string="KW_UNSIGNED",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="continue",
- .kw_string="KW_CONTINUE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="for",
- .kw_string="KW_FOR",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="signed",
- .kw_string="KW_SIGNED",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="void",
- .kw_string="KW_VOID",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="default",
- .kw_string="KW_DEFAULT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="goto",
- .kw_string="KW_GOTO",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="sizeof",
- .kw_string="KW_SIZEOF",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="volatile",
- .kw_string="KW_VOLATILE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="if",
- .kw_string="KW_IF",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="static",
- .kw_string="KW_STATIC",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="while",
- .kw_string="KW_WHILE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="!",
- .kw_string="KW_EXCLAMATION",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="%",
- .kw_string="KW_PERCENT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="&",
- .kw_string="KW_AND",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="&&",
- .kw_string="KW_AND_AND",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="(",
- .kw_string="KW_OPEN_NORMAL",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=")",
- .kw_string="KW_CLOSE_NORMAL",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="*",
- .kw_string="KW_STAR",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="+",
- .kw_string="KW_PLUS",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=",",
- .kw_string="KW_COMMA",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="-",
- .kw_string="KW_MINUS",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=".",
- .kw_string="KW_DOT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="->",
- .kw_string="KW_ARROW",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=":",
- .kw_string="KW_COLUMN",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=";",
- .kw_string="KW_SEMI_COLUMN",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="<",
- .kw_string="KW_LESS",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="=",
- .kw_string="KW_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="==",
- .kw_string="KW_EQEQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=">",
- .kw_string="KW_MORE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="?",
- .kw_string="KW_QUESTION",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="[",
- .kw_string="KW_OPEN_SQUARE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="]",
- .kw_string="KW_CLOSE_SQUARE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="^",
- .kw_string="KW_HAT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="{",
- .kw_string="KW_OPEN_CURLY",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="}",
- .kw_string="KW_CLOSE_CURLY",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="|",
- .kw_string="KW_PIPE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="||",
- .kw_string="KW_PIPE_PIPE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="~",
- .kw_string="KW_TILDE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="++",
- .kw_string="KW_PLUSPLUS",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="--",
- .kw_string="KW_MINUSMINUS",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=">>",
- .kw_string="KW_SHIFT_RIGHT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="<<",
- .kw_string="KW_SHIFT_LEFT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="<=",
- .kw_string="KW_LESS_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=">=",
- .kw_string="KW_MORE_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="!=",
- .kw_string="KW_NOT_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="+=",
- .kw_string="KW_PLUS_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="-=",
- .kw_string="KW_MINUS_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="*=",
- .kw_string="KW_STAR_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="%=",
- .kw_string="KW_PERCENT_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="/=",
- .kw_string="KW_DIV_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="<<=",
- .kw_string="KW_SHIFT_LEFT_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=">>=",
- .kw_string="KW_SHIFT_RIGHT_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="&=",
- .kw_string="KW_AND_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="^=",
- .kw_string="KW_HAT_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="|=",
- .kw_string="KW_PIPE_EQ",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="#",
- .kw_string="KW_HASHTAG",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="##",
- .kw_string="KW_HASHTAG_HASHTAG",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="...",
- .kw_string="KW_ELIPSIS",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="/",
- .kw_string="KW_DIV",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="inline",
- .kw_string="KW_INLINE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="restrict",
- .kw_string="KW_RESTRICT",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="_Bool",
- .kw_string="KW_BOOL",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="_Complex",
- .kw_string="KW_COMPLEX",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="_Imaginary",
- .kw_string="KW_IMAGINARY",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="<:",
- .kw_string="KW_OPEN_SQUARE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword=":>",
- .kw_string="KW_CLOSE_SQUARE",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="<%",
- .kw_string="KW_OPEN_CURLY",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="%>",
- .kw_string="KW_CLOSE_CURLY",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="%:",
- .kw_string="KW_HASHTAG",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="%:%:",
- .kw_string="KW_HASHTAG_HASHTAG",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="NULL"
- },
- {
- .keyword="defined",
- .kw_string="KW_ID",
- .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
- .data_string="&defined_id"
- },
- };
-
- size_t number_of_chonky_keywords=sizeof(chonky_keywords)/sizeof(chonky_keywords[0]);
- #endif
F diff --git a/src/frontend/lex/automatas/generator/keyword_list.h b/src/frontend/lex/automatas/generator/keyword_list.h deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/keyword_list.h +++ /dev/null
- #ifndef WONKY_KEYWORD_LIST_H
- #define WONKY_KEYWORD_LIST_H WONKY_KEYWORD_LIST_H
- #include <keyword_list.hh>
-
- #include <automata.h>
- #include <wonky_malloc.h>
- #include <generator.h>
-
- struct Keyword_Entry
- {
- const char *keyword;
- const char *kw_string;
- const char *action_string;
- const char *data_string;
- };
-
- extern struct Keyword_Entry chonky_keywords[];
- extern size_t number_of_chonky_keywords;
-
- #endif
F diff --git a/src/frontend/lex/automatas/generator/keyword_list.hh b/src/frontend/lex/automatas/generator/keyword_list.hh deleted file mode 100644 --- a/src/frontend/lex/automatas/generator/keyword_list.hh +++ /dev/null
- #ifndef WONKY_KEYWORD_LIST_HH
- #define WONKY_KEYWORD_LIST_HH WONKY_KEYWORD_LIST_HH
-
- struct Keyword_Entry;
-
- #endif
F diff --git a/src/frontend/lex/lex_preprocessing_directive.c b/src/frontend/lex/lex_preprocessing_directive.c new file mode 100644 --- /dev/null +++ b/src/frontend/lex/lex_preprocessing_directive.c
+ #ifndef WONKY_LEX_PREPROCESSING_DIRECTIVE_C
+ #define WONKY_LEX_PREPROCESSING_DIRECTIVE_C WONKY_LEX_PREPROCESSING_DIRECTIVE_C
+ #include <lex_preprocessing_directive.h>
+
+ /*we have skipped the leading #*/
+ /*
+ #include string
+ #include <qchar>
+ #define [ id(list) replacement
+ #undef [ id ]
+ #if
+ #ifdef
+ #ifndef
+ #
+
+ #elif
+ #else
+ #endif
+
+
+ #error
+ #pragma
+ #line number [string]
+
+ */
+ void parse_preproc_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold;
+ /*TODO fix!*/
+ /*hold=get_next_token(src,&chonky_jr[0],0);*/
+ hold=get_next_token(src,&chonky[0],0);
+ switch(hold->type)
+ {
+ case PKW_INCLUDE:
+ wonky_free(hold);
+ parse_include_line(src,translation_data);
+ return;
+ case PKW_DEFINE:
+ wonky_free(hold);
+ parse_define_line(src,translation_data);
+ return;
+ case PKW_IF:
+ wonky_free(hold);
+ parse_preproc_if_line(src,translation_data);
+ return;
+ case PKW_IFDEF:
+ wonky_free(hold);
+ parse_preproc_ifdef_line(src,translation_data);
+ return;
+ case PKW_IFNDEF:
+ wonky_free(hold);
+ parse_preproc_ifndef_line(src,translation_data);
+ return;
+ case PKW_UNDEF:
+ wonky_free(hold);
+ parse_preproc_undef_line(src,translation_data);
+ return;
+ case PKW_ENDIF:
+ wonky_free(hold);
+ push_lexing_error("unmatched endif",src,translation_data);
+ return;
+ case PKW_ELSE:
+ wonky_free(hold);
+ push_lexing_error("unmatched else",src,translation_data);
+ return;
+ case PKW_ELIF:
+ wonky_free(hold);
+ push_lexing_error("unmatched elif",src,translation_data);
+ return;
+ case PKW_LINE:
+ wonky_free(hold);
+ parse_preproc_line_line(src,translation_data);
+ return;
+ case PKW_ERROR:
+ wonky_free(hold);
+ parse_preproc_error_line(src,translation_data);
+ return;
+ default:
+ /*TODO error*/
+ wonky_free(hold);
+ push_lexing_error("expected a preprocessing directive",src,translation_data);
+ return;
+
+ }
+ }
+ void parse_include_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold;
+ hold=get_next_token(src,&chonky[0],0);
+ if(hold->type==KW_STRING)
+ {
+ char *where_to_search[]={src->src_name->base,NULL};
+ struct Source_File *hold_file;
+
+ hold->data[hold->data_size-1]='\0';
+ hold->data_size-=2;
+ ++hold->data;
+ handle_splicing(hold);
+
+
+ /*search in the directory of the file from which we include*/
+ hold_file=get_source_file(hold->data,where_to_search);
+ /*fallback to well known locations == <>*/
+ if(hold_file==NULL)
+ {
+ hold_file=get_source_file(hold->data,well_known_locations_base);
+ if(hold_file==NULL)
+ {
+ /*TODO error*/
+ push_lexing_error("file in include directive not found",src,translation_data);
+ wonky_free(hold);
+ return;
+ }
+ }
+ lex_program(translation_data,hold_file);
+ wonky_free(hold);
+ }else if(hold->type==KW_LESS)/*hack*/
+ {
+ struct Source_File *hold_file;
+ ++hold->data;
+ while(src->src[src->where_in_src]!='>' && src->where_in_src<src->src_size)
+ {
+ ++src->where_in_src;
+ ++hold->data_size;
+ }
+ if(src->where_in_src==src->src_size)
+ {
+ /*TODO error*/
+ wonky_free(hold);
+ return;
+ }
+ /*skip the >*/
+ ++src->where_in_src;
+ hold->data[hold->data_size-1]='\0';
+ handle_splicing(hold);
+
+ hold_file=get_source_file(hold->data,well_known_locations_base);
+ if(hold_file==NULL)
+ {
+ /*TODO error*/
+ push_lexing_error("file in include directive not found",src,translation_data);
+ wonky_free(hold);
+ return;
+ }
+
+ lex_program(translation_data,hold_file);
+ wonky_free(hold);
+
+ }else
+ {
+ /*TODO error*/
+ push_lexing_error("include error",src,translation_data);
+ wonky_free(hold);
+ return;
+ }
+
+
+ chase_new_line(src,translation_data);
+ }
+
+ /*skipped # and 'define'*/
+ void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold_token;
+ struct token *macro_name;
+ struct define_directive *new_macro;
+ struct Queue *hold_tokens;
+ size_t number_of_arguments=0;
+ int *hold_index;
+
+
+ macro_name=get_next_token(src,&chonky[0],0);
+ if(macro_name->type!=KW_ID)
+ {
+ wonky_free(macro_name);
+ push_lexing_error("expected id after #define",src,translation_data);
+ return;
+ }
+
+ new_macro=get_define_directive(macro_name);
+ /*white space*/
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token->type==KW_OPEN_NORMAL)
+ {
+ wonky_free(hold_token);
+ while(1)
+ {
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token->type!=KW_ID)
+ {
+ push_lexing_error("expected id in define argument list",src,translation_data);
+ wonky_free(hold_token);
+ break;
+ }
+ hold_index=wonky_malloc(sizeof(int));
+ *hold_index=number_of_arguments;
+ ++number_of_arguments;
+ Map_Push(new_macro->arguments,hold_token->data,hold_token->data_size,hold_index);
+ wonky_free(hold_token);
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token->type!=KW_COMMA)
+ {
+ if(hold_token->type==KW_CLOSE_NORMAL)
+ {
+ wonky_free(hold_token);
+ break;
+ }else
+ {
+ push_lexing_error("expected ',' in define argument list",src,translation_data);
+ wonky_free(hold_token);
+ break;
+ }
+ }
+ wonky_free(hold_token);
+ }
+
+ }else if(hold_token->type==KW_NOTYPE)
+ {
+ wonky_free(hold_token);
+ }
+
+ /*push things*/
+
+ hold_tokens=translation_data->tokens;
+ translation_data->tokens=new_macro->macro_tokens;
+
+ new_macro->number_of_arguments=number_of_arguments;
+ /*there is something in hold_token*/
+ while( (hold_token=get_next_token(src,&chonky[0],0))->type != KW_NOTYPE)
+ {
+ expand_macro(hold_token,src,translation_data);
+ }
+
+ /*removing the notype token*/
+ wonky_free(hold_token);
+
+ translation_data->tokens=hold_tokens;
+ /*push the directive into the macro map*/
+ Map_Push(translation_data->macros,macro_name->data,macro_name->data_size,new_macro);
+ //wonky_free(macro_name);
+ chase_new_line(src,translation_data);
+
+ }
+ /*
+ id[(list)] tokens \n
+ */
+ struct define_directive* get_define_directive(struct token* macro_name)
+ {
+ struct define_directive *ret;
+ ret=wonky_malloc(sizeof(struct token));
+ ret->macro_name=macro_name;
+
+ ret->macro_tokens=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(ret->macro_tokens);
+
+ ret->arguments=wonky_malloc(sizeof(struct Map));
+ Map_Init(ret->arguments);
+
+ ret->number_of_arguments=0;
+
+ return ret;
+ }
+
+ /*returns an array of queues*/
+ struct Queue* make_define_argument_list(size_t number_of_arguments)
+ {
+ size_t i;
+ struct Queue *ret;
+
+ if(number_of_arguments==0)
+ return NULL;
+
+ ret=wonky_malloc(sizeof(struct Queue)*number_of_arguments);
+
+ for(i=0;i<number_of_arguments;++i)
+ {
+ Queue_Init(ret+i);
+ }
+ return ret;
+ }
+ void delete_define_argument_list(size_t number_of_arguments,struct Queue *args)
+ {
+ if(number_of_arguments==0)
+ {
+ wonky_assert(args==NULL);
+ return;
+ }
+ flush_macro_arguments(number_of_arguments,args);
+ wonky_free(args);
+ }
+
+ void expand_macro_argument(struct Queue *replacement_tokens,struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct Queue_Node *it;
+ struct token *hold_token;
+ for(it=replacement_tokens->first;it!=NULL;it=it->prev)
+ {
+ hold_token=copy_token((struct token*)it->data);
+ hold_token->line=src->which_row;
+ hold_token->column=src->which_column;
+ Queue_Push(translation_data->tokens,hold_token);
+ //Queue_Push(translation_data->tokens,copy_token((struct token*)it->data));
+ }
+ }
+ void load_macro_arguments(struct Queue *args,size_t number_of_arguments,struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold;
+ struct Queue *hack;
+ size_t i;
+ size_t j;
+
+ if(number_of_arguments==0)
+ return;
+
+ hold=get_next_token(src,&chonky[0],1);
+ if(hold->type!=KW_OPEN_NORMAL)
+ {
+ push_lexing_error("expected '(' in macro expansion",src,translation_data);
+ wonky_free(hold);
+ return;
+ }
+ wonky_free(hold);
+
+ hack=translation_data->tokens;
+ for(i=0;i<number_of_arguments-1;++i)
+ {
+ translation_data->tokens=args+i;
+ for(
+ hold=get_next_token(src,&chonky[0],1),j=0;
+ hold->type!=KW_COMMA && hold->type!=KW_NOTYPE;
+ hold=get_next_token(src,&chonky[0],1),++j
+ )
+ {
+ expand_macro(hold,src,translation_data);
+ }
+ if(hold->type==KW_NOTYPE)
+ {
+ push_lexing_error("expected ',' in macro argument list",src,translation_data);
+ wonky_free(hold);
+ goto cleanup;
+ }
+ if(j==0)
+ {
+ push_lexing_error("expected argument in macro argument list",src,translation_data);
+ wonky_free(hold);
+ goto cleanup;
+ }
+
+ }
+ translation_data->tokens=args+i;
+ for(
+ hold=get_next_token(src,&chonky[0],1),j=0;
+ hold->type!=KW_CLOSE_NORMAL;
+ hold=get_next_token(src,&chonky[0],1),++j
+ )
+ {
+ if(hold->type==KW_NOTYPE)
+ {
+ push_lexing_error("expected ')' in macro argument list",src,translation_data);
+ wonky_free(hold);
+ goto cleanup;
+ }
+ expand_macro(hold,src,translation_data);
+ }
+ if(j==0)
+ {
+ push_lexing_error("expected argument in macro argument list",src,translation_data);
+ wonky_free(hold);
+ }
+
+ cleanup:
+ translation_data->tokens=hack;
+
+
+ }
+ void flush_macro_arguments(size_t number_of_arguments,struct Queue *args)
+ {
+ size_t i;
+ for(i=0;i<number_of_arguments;++i)
+ {
+ while(args[i].size>0)
+ wonky_free(Queue_Pop(args+i));
+ }
+ }
+ /*macro name token is wonky_freed on expansion , if it is not a macro name it is pushed into token queue*/
+ void expand_macro(struct token* macro_name,struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct define_directive *hold=NULL;
+ struct token *hold_token;
+ int *index;
+ struct Queue_Node *it;
+ struct Queue *argument_list;
+
+ if(macro_name->type==KW_ID)
+ hold=Map_Check(translation_data->macros,macro_name->data,macro_name->data_size);
+ if(hold!=NULL)
+ {
+ wonky_free(macro_name);
+ argument_list=make_define_argument_list(hold->number_of_arguments);
+ load_macro_arguments(argument_list,hold->number_of_arguments,src,translation_data);
+ if(translation_data->errors->size>0)
+ {
+ delete_define_argument_list(hold->number_of_arguments,argument_list);
+ return;
+ }
+
+
+ for(it=hold->macro_tokens->first;it!=NULL;it=it->prev)
+ {
+ hold_token=(struct token*)it->data;
+ index=Map_Check(hold->arguments,hold_token->data,hold_token->data_size);
+ if(index!=NULL)
+ {
+ expand_macro_argument(argument_list+*index,src,translation_data);
+ }else
+ {
+ hold_token=copy_token(hold_token);
+
+ hold_token->line=src->which_row;
+ hold_token->column=src->which_column;
+
+ wonky_assert(is_valid_token(hold_token));
+
+ Queue_Push(translation_data->tokens,hold_token);
+ }
+ }
+ delete_define_argument_list(hold->number_of_arguments,argument_list);
+ }else
+ {
+ /*this isn't a macro, so we just push it to the token queue*/
+ wonky_assert(is_valid_token(macro_name));
+ Queue_Push(translation_data->tokens,macro_name);
+ }
+ }
+ void preproc_lex_first_part(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct Source_File temp_src;
+ struct token *hold_token;
+ char just_in_case;
+
+ temp_src=*src;
+ hold_token=preproc_find_else(src,translation_data,1);
+
+
+ temp_src.src_size=src->where_in_src;
+ just_in_case=src->src[src->where_in_src];
+ src->src[src->where_in_src]='\0';
+
+ lex(&temp_src,translation_data);
+
+ src->src[src->where_in_src]=just_in_case;
+
+ if(hold_token!=NULL)
+ wonky_free(hold_token);
+ do
+ {
+ hold_token=preproc_find_else(src,translation_data,0);
+ if(hold_token)
+ wonky_free(hold_token);
+ else
+ break;
+ }while(!has_new_errors(translation_data));
+
+ if(hold_token!=NULL)
+ {
+ wonky_free(hold_token);
+ push_lexing_error("could not find matching #else, #elif or #endif",src,translation_data);
+ }
+ }
+ /*
+ we have skipped the #if part so this could be used for elif
+ */
+ void parse_preproc_if_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+
+ struct Queue *tokens;
+ struct Queue *swap;
+ struct AST *condition;
+ struct Scope *null_scope;
+ struct token *hold_token;
+ int result;
+
+ null_scope=get_normal_scope(NULL,FILE_SCOPE);
+
+ tokens=lex_line(src,translation_data,1);
+
+ swap=translation_data->tokens;
+ translation_data->tokens=tokens;
+
+ condition=parse_expression(translation_data,null_scope);
+ result=evaluate_const_expression_integer(condition,translation_data);
+ delete_normal_scope((struct Normal_Scope*)null_scope);
+ delete_ast(condition);
+
+ if(result)
+ {
+ preproc_lex_first_part(src,translation_data);
+ }else
+ {
+ hold_token=preproc_find_else(src,translation_data,0);
+ if(hold_token!=NULL && hold_token->type==PKW_ELIF)
+ {
+ parse_preproc_if_line(src,translation_data);
+ }
+ else if(hold_token!=NULL)
+ {
+ preproc_lex_first_part(src,translation_data);
+ }
+ }
+
+
+ }
+ struct token* preproc_find_else(struct Source_File *src,struct Translation_Data *translation_data,char jump_before)
+ {
+ struct token *hold_token;
+ struct Source_File temp_src;
+ int indentation=1;
+
+ temp_src=*src;
+ while(src->src[src->where_in_src]!='\0' && indentation)
+ {
+ /*BEWARE*/
+ temp_src=*src;
+ /*END BEWARE*/
+
+ hold_token=get_next_token(src,&chonky[0],1);
+ if(hold_token->type==KW_HASHTAG)
+ {
+ wonky_free(hold_token);
+ /*TODO FIX*/
+ /*hold_token=get_next_token(src,&chonky_jr[0],0);*/
+ hold_token=get_next_token(src,&chonky[0],0);
+ switch(hold_token->type)
+ {
+ case PKW_IF:
+ case PKW_IFDEF:
+ case PKW_IFNDEF:
+ ++indentation;
+ break;
+
+ case PKW_ENDIF:
+ --indentation;
+ break;
+
+ case PKW_ELSE:
+ case PKW_ELIF:
+ if(indentation==1)
+ {
+ if(jump_before)
+ *src=temp_src;
+ return hold_token;
+ }
+ else
+ {
+ break;
+ }
+ case PKW_NOTYPE:
+ wonky_free(hold_token);
+ goto_new_line(src,translation_data);
+ return NULL;
+ }
+ wonky_free(hold_token);
+
+ }else if(hold_token->type!=KW_NOTYPE)
+ {
+ wonky_free(hold_token);
+ }else
+ {
+ if(src->where_in_src!=src->src_size)
+ push_lexing_error("unexpected character",src,translation_data);
+ wonky_free(hold_token);
+ return NULL;
+ }
+ goto_new_line(src,translation_data);
+ }
+ /*BEWARE*/
+ //goto_new_line(src,translation_data);
+ /*END BEWARE*/
+ if(jump_before)
+ *src=temp_src;
+ return NULL;
+ }
+ void parse_preproc_ifdef_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold_token;
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token==NULL || hold_token->type!=KW_ID)
+ {
+ wonky_free(hold_token);
+ push_lexing_error("expected an id here",src,translation_data);
+ chase_new_line(src,translation_data);
+ return;
+ }else
+ {
+ if(Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
+ {
+ preproc_lex_first_part(src,translation_data);
+ }else
+ {
+ wonky_free(hold_token);
+
+ hold_token=preproc_find_else(src,translation_data,1);
+
+ if(hold_token!=NULL && hold_token->type==PKW_ELIF)
+ {
+ parse_preproc_if_line(src,translation_data);
+ }else if(hold_token!=NULL)
+ {
+ preproc_find_else(src,translation_data,0);
+ preproc_lex_first_part(src,translation_data);
+ }
+
+ wonky_free(hold_token);
+ }
+
+ }
+ chase_new_line(src,translation_data);
+ }
+ void parse_preproc_ifndef_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct token *hold_token;
+ hold_token=get_next_token(src,&chonky[0],0);
+ if(hold_token==NULL || hold_token->type!=KW_ID)
+ {
+ push_lexing_error("expected an id here",src,translation_data);
+ chase_new_line(src,translation_data);
+ wonky_free(hold_token);
+ return;
+ }else
+ {
+ if(!Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
+ {
+ wonky_free(hold_token);
+ preproc_lex_first_part(src,translation_data);
+ }else
+ {
+ wonky_free(hold_token);
+
+ hold_token=preproc_find_else(src,translation_data,1);
+ if(hold_token!=NULL && hold_token->type==PKW_ELIF)
+ {
+ parse_preproc_if_line(src,translation_data);
+ }
+ else if(hold_token!=NULL)
+ {
+ preproc_find_else(src,translation_data,0);
+ preproc_lex_first_part(src,translation_data);
+ }
+ wonky_free(hold_token);
+ }
+
+ }
+ chase_new_line(src,translation_data);
+ }
+ void parse_preproc_undef_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct define_directive *hold_macro;
+ struct token *id;
+
+ id=get_next_token(src,&chonky[0],0);
+ if(id->type!=KW_ID)
+ {
+ push_lexing_error("expected an id here",src,translation_data);
+ }else
+ {
+ hold_macro=Map_Check(translation_data->macros,id->data,id->data_size);
+ if(hold_macro!=NULL)
+ {
+ delete_macro(hold_macro);
+ Map_Remove(translation_data->macros,id->data,id->data_size);
+ }
+ }
+ wonky_free(id);
+ chase_new_line(src,translation_data);
+ }
+ void parse_preproc_error_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ char *error;
+ size_t line,column;
+ error=src->src+src->where_in_src;
+ line=src->which_row+1;
+ column=src->which_column+1;
+
+
+ goto_new_line(src,translation_data);
+ src->src[src->where_in_src-1]='\0';
+
+ push_raw_translation_error(error,line,column,src->src_name->filename,translation_data);
+
+ }
+ void parse_preproc_line_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ struct Queue *tokens;
+ struct Translation_Data hack;
+ struct token *hold_line;
+ struct token *hold_name;
+ struct AST *line_expression;
+
+ tokens=lex_line(src,translation_data,0);
+ hack=*translation_data;
+ hack.tokens=tokens;
+ /*TODO account for other types of integer constants*/
+ if(check(&hack,KW_DECIMAL_CONSTANT,0))
+ {
+ hold_line=(struct token*)Queue_Pop(tokens);
+ line_expression=(struct AST*)get_constant_tree(get_expression_value_constant(extract_constant(hold_line,translation_data)));
+
+ src->which_row=evaluate_const_expression_integer(line_expression,translation_data);
+ if(check(&hack,KW_STRING,0))
+ {
+ hold_name=(struct token*)Queue_Pop(tokens);
+ hold_name->data[hold_name->data_size]='\0';
+ if(tokens->size>0)
+ {
+ wonky_free(hold_line);
+ wonky_free(hold_name);
+ flush_tokens(tokens);
+ push_lexing_error("expected a new line in #line preprocessing directive here",src,translation_data);
+ return;
+ }else
+ {
+ delete_source_name(src->src_name);
+ src->src_name=get_source_name(hold_name->data,"");
+ return;
+ }
+
+ }else if(tokens->size>0)
+ {
+ wonky_free(hold_line);
+ flush_tokens(tokens);
+ push_lexing_error("expected a string or new line in #line preprocessing directive here",src,translation_data);
+ return;
+ }
+
+ }else
+ {
+ flush_tokens(tokens);
+ push_lexing_error("expected a line number in #line preprocessing directive here",src,translation_data);
+ return;
+ }
+ }
+ void delete_macro(void *macro)
+ {
+ /*
+ #define AS_MACRO(x) ((struct define_directive*)macro)
+ wonky_free(AS_MACRO(macro)->macro_name);
+ flush_tokens(AS_MACRO(macro)->macro_tokens);
+ wonky_free(AS_MACRO(macro)->macro_tokens);
+ Map_Map(AS_MACRO(macro)->arguments,wonky_free);
+ wonky_free(AS_MACRO(macro)->arguments);
+ wonky_free(macro);
+ #undef AS_MACRO
+ */
+ }
+ struct Queue* lex_line(struct Source_File *src,struct Translation_Data *translation_data,char lex_defined_token)
+ {
+
+ struct Source_File temp_src;
+ struct token *hold_token;
+ struct Queue *tokens;
+ char just_in_case;
+
+ tokens=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(tokens);
+
+
+ temp_src=*src;
+ goto_new_line(src,translation_data);
+ just_in_case=src->src[src->where_in_src];
+ src->src[src->where_in_src]='\0';
+
+ translation_data->tokens=tokens;
+
+ while((hold_token=get_next_token(&temp_src,&chonky[0],0))->type!=KW_NOTYPE)
+ {
+ if(lex_defined_token && hold_token->type==KW_ID && hold_token->data_size==7 && gstrn_cmp(hold_token->data,"defined",7))
+ {
+ wonky_free(hold_token);
+ hold_token=get_next_token(&temp_src,&chonky[0],0);
+ if(hold_token->type==KW_OPEN_NORMAL)
+ {
+ wonky_free(hold_token);
+ hold_token=get_next_token(&temp_src,&chonky[0],0);
+ if(hold_token->type!=KW_ID)
+ {
+ push_lexing_error("expected an id after '(' in defined",src,translation_data);
+ }else
+ {
+ struct token *hold_closing_token;
+ hold_closing_token=get_next_token(&temp_src,&chonky[0],0);
+ if(hold_closing_token->type!=KW_CLOSE_NORMAL)
+ {
+ push_lexing_error("expected an ')' after id in define",src,translation_data);
+ }else
+ {
+ if(!Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
+ {
+ hold_token->type=KW_DECIMAL_CONSTANT;
+ hold_token->data="0";
+ hold_token->data_size=1;
+ }else
+ {
+ hold_token->type=KW_DECIMAL_CONSTANT;
+ hold_token->data="1";
+ hold_token->data_size=1;
+ }
+
+ }
+ }
+ }else if(hold_token->type!=KW_ID)
+ {
+ push_lexing_error("expected an id after define",src,translation_data);
+ }else
+ {
+ if(!Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
+ {
+ hold_token->type=KW_DECIMAL_CONSTANT;
+ hold_token->data="0";
+ hold_token->data_size=1;
+ }else
+ {
+ hold_token->type=KW_DECIMAL_CONSTANT;
+ hold_token->data="1";
+ hold_token->data_size=1;
+ }
+ }
+ }
+ Queue_Push(tokens,hold_token);
+ }
+
+ wonky_free(hold_token);
+ src->src[src->where_in_src]=just_in_case;
+
+ return tokens;
+ }
+
+ #endif
F diff --git a/src/frontend/lex/lex_preprocessing_directive.h b/src/frontend/lex/lex_preprocessing_directive.h new file mode 100644 --- /dev/null +++ b/src/frontend/lex/lex_preprocessing_directive.h
+ #ifndef WONKY_LEX_PREPROCESSING_DIRECTIVE_H
+ #define WONKY_LEX_PREPROCESSING_DIRECTIVE_H WONKY_LEX_PREPROCESSING_DIRECTIVE_H
+ #include <lex_preprocessing_directive.hh>
+ #include <program.h>
+ #include <lexer.h>
+ #include <automata.h>
+ #include <gcc_error.h>
+ #include <map.h>
+ #include <scope.h>
+ #include <common.h>
+ #include <preprocessing.h>
+
+ void parse_preproc_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_include_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_preproc_if_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_preproc_ifdef_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_preproc_ifndef_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_preproc_undef_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_preproc_error_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void parse_preproc_line_line(struct Source_File *src,struct Translation_Data *translation_data);
+
+
+ struct Queue* lex_line(struct Source_File *src,struct Translation_Data *translation_data,char lex_defined_token);
+ /*preproc if stuff*/
+ /*returns an else or elif token, or if it hits matching endif before that return NULL*/
+ struct token* preproc_find_else(struct Source_File *src,struct Translation_Data *translation_data,char jump_before);
+
+ /*hack*/
+ void preproc_lex_first_part(struct Source_File *src,struct Translation_Data *translation_data);
+
+
+ /*define stuff*/
+ void expand_macro(struct token* macro_name,struct Source_File *src,struct Translation_Data *translation_data);
+
+
+
+ struct define_directive* get_define_directive(struct token* macro_name);
+ struct Queue* make_define_argument_list(size_t number_of_arguments);
+
+ void expand_macro_argument(struct Queue *replacement_tokens,struct Source_File *src,struct Translation_Data *translation_data);
+ void load_macro_arguments(struct Queue *args,size_t number_of_arguments,struct Source_File *src,struct Translation_Data *translation_data);
+ void flush_macro_arguments(size_t number_of_arguments,struct Queue *args);
+ void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data);
+
+ void delete_define_argument_list(size_t number_of_arguments,struct Queue *args);
+ void delete_macro(void *macro);
+
+ #endif
F diff --git a/src/frontend/lex/lex_preprocessing_directive.hh b/src/frontend/lex/lex_preprocessing_directive.hh new file mode 100644 --- /dev/null +++ b/src/frontend/lex/lex_preprocessing_directive.hh
+ #ifndef WONKY_LEX_PREPROCESSING_DIRECTIVE_HH
+ #define WONKY_LEX_PREPROCESSING_DIRECTIVE_HH WONKY_LEX_PREPROCESSING_DIRECTIVE_HH
+
+
+ #endif
F diff --git a/src/frontend/lex/lexer.c b/src/frontend/lex/lexer.c --- a/src/frontend/lex/lexer.c +++ b/src/frontend/lex/lexer.c
#define WONKY_LEXER_C WONKY_LEXER_C
/*asdf*/#include <lexer.h>
+
+ void lex(struct Source_Name *src_name,struct Program *program)
+ {
+ struct Lexer_Data *lexer_data;
+ struct Preprocessing_Translation_Unit *lexed_unit;
+
+ lexer_data=get_lexer_data(src_name,program);
+
+ lexed_unit=lex_inner(lexer_data);
+
+
+
+ Map_Push(
+ program->preprocessing_translation_units,
+ src_name->normalised_source_name,
+ src_name->normalised_name_size,
+ lexed_unit
+ );
+
+
+
+ delete_lexer_data(lexer_data);
+ }
+
+
+ struct Preprocessing_Translation_Unit* lex_inner(struct Lexer_Data *lexer_data)
+ {
+ struct Preprocessing_Translation_Unit *unit;
+ struct token *token;
+
+ while(lexer_skip_white_space(lexer_data) , !lexer_eof(lexer_data))
+ {
+ token=lexer_extract_next_token(lexer_data);
+ push_token_into_preprocessing_translation_unit(unit,token);
+ }
+
+ return unit;
+ }
+
+
+ struct Lexer_Data* get_lexer_data(struct Source_Name *src_name,struct Program *program)
+ {
+ struct Lexer_Data *ret;
+
+ ret=wonky_malloc(sizeof(struct Lexer_Data));
+ ret->where_in_src=0;
+ ret->which_column=0;
+ ret->which_row=0;
+ ret->token_size=0;
+ ret->best_token_size=0;
+ ret->best_token_line=0;
+ ret->best_token_column=0;
+ ret->best_token_where_in_src_start=0;
+ ret->best_token_where_in_src_end=0;
+ ret->best_token_beg_line=0;
+ ret->is_in_the_begining_of_line=0;
+ ret->src=get_source_file(src_name);
+ ret->program=program;
+
+ return ret;
+
+ }
+
+
+ void lexer_skip_white_space(struct Lexer_Data *lexer_data)
+ {
+ enum White_Space_States
+ {
+ BLANK_SPACE,
+ POSSIBLE_LINE_SPLICE,
+ NON_WHITE_SPACE
+ }state=BLANK_SPACE;
+
+ while(state!=NON_WHITE_SPACE && !lexer_eof(lexer_data))
+ switch(lexer_data->src->src[lexer_data->where_in_src])
+ {
+ '\n':
+ state=BLANK_SPACE;
+ ++lexer_data->where_in_src;
+ break;
+ ' ':
+ '\t':
+ '\v':
+ if(state==POSSIBLE_LINE_SPLICE)
+ state=NON_WHITE_SPACE;
+ else
+ ++lexer_data->where_in_src;
+
+ break;
+ '\':
+ if(state==POSSIBLE_LINE_SPLICE)
+ {
+ state=NON_WHITE_SPACE;
+ }else
+ {
+ ++lexer_data->where_in_src;
+ state=POSSIBLE_LINE_SPLICE;
+ }
+ break;
+ default:
+ state=NON_WHITE_SPACE;
+ }
+
+ }
+ inline _Bool lexer_eof(struct Lexer_Data *lexer_data)
+ {
+ return lexer_data->where_in_src==lexer_data->src->src_size;
+ }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
void lex(struct Source_File *src,struct Translation_Data *translation_data)
{
F diff --git a/src/frontend/lex/lexer.h b/src/frontend/lex/lexer.h --- a/src/frontend/lex/lexer.h +++ b/src/frontend/lex/lexer.h
#include <stdio.h>
#include <queue.h>
#include <program.h>
- #include <preprocessing.h>
#include <common.h>
#include <wonky_malloc.h>
size_t best_token_where_in_src_start;
size_t best_token_where_in_src_end;
- char best_token_beg_line;
+ _Bool best_token_beg_line;
- char is_in_the_begining_of_line;
+ _Bool is_in_the_begining_of_line;
struct Source_File *src;
struct Program *program;
};
+
+
+
+
+ void lex(struct Source_Name *src_name,struct Program *program);
+ struct Preprocessing_Translation_Unit* lex_inner(struct Lexer_Data *lexer_data);
+ struct Lexer_Data* get_lexer_data(struct Source_Name *src_name,struct Program *program);
+
+ void lexer_skip_white_space(struct Lexer_Data *lexer_data);
+ inline _Bool lexer_eof(struct Lexer_Data *lexer_data);
+
+ struct token* lexer_extract_next_token(struct Lexer_Data *lexer_data);
+
+ void delete_lexer_data(struct Lexer_Data *lexer_data);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
void lex(struct Lexer_Data *lexer_data);
struct token* get_next_token(struct Source_File *src,struct Automata_Node *start_state,char skip_new_line);
F diff --git a/src/frontend/lex/preprocessing.c b/src/frontend/lex/preprocessing.c deleted file mode 100644 --- a/src/frontend/lex/preprocessing.c +++ /dev/null
- #ifndef WONKY_PREPROCESSING_C
- #define WONKY_PREPROCESSING_C WONKY_PREPROCESSING_C
- #include <preprocessing.h>
-
-
- /*we have skipped the leading #*/
- /*
- #include string
- #include <qchar>
- #define [ id(list) replacement
- #undef [ id ]
- #if
- #ifdef
- #ifndef
- #
-
- #elif
- #else
- #endif
-
-
- #error
- #pragma
- #line number [string]
-
- */
- void parse_preproc_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct token *hold;
- /*TODO fix!*/
- /*hold=get_next_token(src,&chonky_jr[0],0);*/
- hold=get_next_token(src,&chonky[0],0);
- switch(hold->type)
- {
- case PKW_INCLUDE:
- wonky_free(hold);
- parse_include_line(src,translation_data);
- return;
- case PKW_DEFINE:
- wonky_free(hold);
- parse_define_line(src,translation_data);
- return;
- case PKW_IF:
- wonky_free(hold);
- parse_preproc_if_line(src,translation_data);
- return;
- case PKW_IFDEF:
- wonky_free(hold);
- parse_preproc_ifdef_line(src,translation_data);
- return;
- case PKW_IFNDEF:
- wonky_free(hold);
- parse_preproc_ifndef_line(src,translation_data);
- return;
- case PKW_UNDEF:
- wonky_free(hold);
- parse_preproc_undef_line(src,translation_data);
- return;
- case PKW_ENDIF:
- wonky_free(hold);
- push_lexing_error("unmatched endif",src,translation_data);
- return;
- case PKW_ELSE:
- wonky_free(hold);
- push_lexing_error("unmatched else",src,translation_data);
- return;
- case PKW_ELIF:
- wonky_free(hold);
- push_lexing_error("unmatched elif",src,translation_data);
- return;
- case PKW_LINE:
- wonky_free(hold);
- parse_preproc_line_line(src,translation_data);
- return;
- case PKW_ERROR:
- wonky_free(hold);
- parse_preproc_error_line(src,translation_data);
- return;
- default:
- /*TODO error*/
- wonky_free(hold);
- push_lexing_error("expected a preprocessing directive",src,translation_data);
- return;
-
- }
- }
- void parse_include_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct token *hold;
- hold=get_next_token(src,&chonky[0],0);
- if(hold->type==KW_STRING)
- {
- char *where_to_search[]={src->src_name->base,NULL};
- struct Source_File *hold_file;
-
- hold->data[hold->data_size-1]='\0';
- hold->data_size-=2;
- ++hold->data;
- handle_splicing(hold);
-
-
- /*search in the directory of the file from which we include*/
- hold_file=get_source_file(hold->data,where_to_search);
- /*fallback to well known locations == <>*/
- if(hold_file==NULL)
- {
- hold_file=get_source_file(hold->data,well_known_locations_base);
- if(hold_file==NULL)
- {
- /*TODO error*/
- push_lexing_error("file in include directive not found",src,translation_data);
- wonky_free(hold);
- return;
- }
- }
- lex_program(translation_data,hold_file);
- wonky_free(hold);
- }else if(hold->type==KW_LESS)/*hack*/
- {
- struct Source_File *hold_file;
- ++hold->data;
- while(src->src[src->where_in_src]!='>' && src->where_in_src<src->src_size)
- {
- ++src->where_in_src;
- ++hold->data_size;
- }
- if(src->where_in_src==src->src_size)
- {
- /*TODO error*/
- wonky_free(hold);
- return;
- }
- /*skip the >*/
- ++src->where_in_src;
- hold->data[hold->data_size-1]='\0';
- handle_splicing(hold);
-
- hold_file=get_source_file(hold->data,well_known_locations_base);
- if(hold_file==NULL)
- {
- /*TODO error*/
- push_lexing_error("file in include directive not found",src,translation_data);
- wonky_free(hold);
- return;
- }
-
- lex_program(translation_data,hold_file);
- wonky_free(hold);
-
- }else
- {
- /*TODO error*/
- push_lexing_error("include error",src,translation_data);
- wonky_free(hold);
- return;
- }
-
-
- chase_new_line(src,translation_data);
- }
-
- /*skipped # and 'define'*/
- void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct token *hold_token;
- struct token *macro_name;
- struct define_directive *new_macro;
- struct Queue *hold_tokens;
- size_t number_of_arguments=0;
- int *hold_index;
-
-
- macro_name=get_next_token(src,&chonky[0],0);
- if(macro_name->type!=KW_ID)
- {
- wonky_free(macro_name);
- push_lexing_error("expected id after #define",src,translation_data);
- return;
- }
-
- new_macro=get_define_directive(macro_name);
- /*white space*/
- hold_token=get_next_token(src,&chonky[0],0);
- if(hold_token->type==KW_OPEN_NORMAL)
- {
- wonky_free(hold_token);
- while(1)
- {
- hold_token=get_next_token(src,&chonky[0],0);
- if(hold_token->type!=KW_ID)
- {
- push_lexing_error("expected id in define argument list",src,translation_data);
- wonky_free(hold_token);
- break;
- }
- hold_index=wonky_malloc(sizeof(int));
- *hold_index=number_of_arguments;
- ++number_of_arguments;
- Map_Push(new_macro->arguments,hold_token->data,hold_token->data_size,hold_index);
- wonky_free(hold_token);
- hold_token=get_next_token(src,&chonky[0],0);
- if(hold_token->type!=KW_COMMA)
- {
- if(hold_token->type==KW_CLOSE_NORMAL)
- {
- wonky_free(hold_token);
- break;
- }else
- {
- push_lexing_error("expected ',' in define argument list",src,translation_data);
- wonky_free(hold_token);
- break;
- }
- }
- wonky_free(hold_token);
- }
-
- }else if(hold_token->type==KW_NOTYPE)
- {
- wonky_free(hold_token);
- }
-
- /*push things*/
-
- hold_tokens=translation_data->tokens;
- translation_data->tokens=new_macro->macro_tokens;
-
- new_macro->number_of_arguments=number_of_arguments;
- /*there is something in hold_token*/
- while( (hold_token=get_next_token(src,&chonky[0],0))->type != KW_NOTYPE)
- {
- expand_macro(hold_token,src,translation_data);
- }
-
- /*removing the notype token*/
- wonky_free(hold_token);
-
- translation_data->tokens=hold_tokens;
- /*push the directive into the macro map*/
- Map_Push(translation_data->macros,macro_name->data,macro_name->data_size,new_macro);
- //wonky_free(macro_name);
- chase_new_line(src,translation_data);
-
- }
- /*
- id[(list)] tokens \n
- */
- struct define_directive* get_define_directive(struct token* macro_name)
- {
- struct define_directive *ret;
- ret=wonky_malloc(sizeof(struct token));
- ret->macro_name=macro_name;
-
- ret->macro_tokens=wonky_malloc(sizeof(struct Queue));
- Queue_Init(ret->macro_tokens);
-
- ret->arguments=wonky_malloc(sizeof(struct Map));
- Map_Init(ret->arguments);
-
- ret->number_of_arguments=0;
-
- return ret;
- }
-
- /*returns an array of queues*/
- struct Queue* make_define_argument_list(size_t number_of_arguments)
- {
- size_t i;
- struct Queue *ret;
-
- if(number_of_arguments==0)
- return NULL;
-
- ret=wonky_malloc(sizeof(struct Queue)*number_of_arguments);
-
- for(i=0;i<number_of_arguments;++i)
- {
- Queue_Init(ret+i);
- }
- return ret;
- }
- void delete_define_argument_list(size_t number_of_arguments,struct Queue *args)
- {
- if(number_of_arguments==0)
- {
- wonky_assert(args==NULL);
- return;
- }
- flush_macro_arguments(number_of_arguments,args);
- wonky_free(args);
- }
-
- void expand_macro_argument(struct Queue *replacement_tokens,struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct Queue_Node *it;
- struct token *hold_token;
- for(it=replacement_tokens->first;it!=NULL;it=it->prev)
- {
- hold_token=copy_token((struct token*)it->data);
- hold_token->line=src->which_row;
- hold_token->column=src->which_column;
- Queue_Push(translation_data->tokens,hold_token);
- //Queue_Push(translation_data->tokens,copy_token((struct token*)it->data));
- }
- }
- void load_macro_arguments(struct Queue *args,size_t number_of_arguments,struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct token *hold;
- struct Queue *hack;
- size_t i;
- size_t j;
-
- if(number_of_arguments==0)
- return;
-
- hold=get_next_token(src,&chonky[0],1);
- if(hold->type!=KW_OPEN_NORMAL)
- {
- push_lexing_error("expected '(' in macro expansion",src,translation_data);
- wonky_free(hold);
- return;
- }
- wonky_free(hold);
-
- hack=translation_data->tokens;
- for(i=0;i<number_of_arguments-1;++i)
- {
- translation_data->tokens=args+i;
- for(
- hold=get_next_token(src,&chonky[0],1),j=0;
- hold->type!=KW_COMMA && hold->type!=KW_NOTYPE;
- hold=get_next_token(src,&chonky[0],1),++j
- )
- {
- expand_macro(hold,src,translation_data);
- }
- if(hold->type==KW_NOTYPE)
- {
- push_lexing_error("expected ',' in macro argument list",src,translation_data);
- wonky_free(hold);
- goto cleanup;
- }
- if(j==0)
- {
- push_lexing_error("expected argument in macro argument list",src,translation_data);
- wonky_free(hold);
- goto cleanup;
- }
-
- }
- translation_data->tokens=args+i;
- for(
- hold=get_next_token(src,&chonky[0],1),j=0;
- hold->type!=KW_CLOSE_NORMAL;
- hold=get_next_token(src,&chonky[0],1),++j
- )
- {
- if(hold->type==KW_NOTYPE)
- {
- push_lexing_error("expected ')' in macro argument list",src,translation_data);
- wonky_free(hold);
- goto cleanup;
- }
- expand_macro(hold,src,translation_data);
- }
- if(j==0)
- {
- push_lexing_error("expected argument in macro argument list",src,translation_data);
- wonky_free(hold);
- }
-
- cleanup:
- translation_data->tokens=hack;
-
-
- }
- void flush_macro_arguments(size_t number_of_arguments,struct Queue *args)
- {
- size_t i;
- for(i=0;i<number_of_arguments;++i)
- {
- while(args[i].size>0)
- wonky_free(Queue_Pop(args+i));
- }
- }
- /*macro name token is wonky_freed on expansion , if it is not a macro name it is pushed into token queue*/
- void expand_macro(struct token* macro_name,struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct define_directive *hold=NULL;
- struct token *hold_token;
- int *index;
- struct Queue_Node *it;
- struct Queue *argument_list;
-
- if(macro_name->type==KW_ID)
- hold=Map_Check(translation_data->macros,macro_name->data,macro_name->data_size);
- if(hold!=NULL)
- {
- wonky_free(macro_name);
- argument_list=make_define_argument_list(hold->number_of_arguments);
- load_macro_arguments(argument_list,hold->number_of_arguments,src,translation_data);
- if(translation_data->errors->size>0)
- {
- delete_define_argument_list(hold->number_of_arguments,argument_list);
- return;
- }
-
-
- for(it=hold->macro_tokens->first;it!=NULL;it=it->prev)
- {
- hold_token=(struct token*)it->data;
- index=Map_Check(hold->arguments,hold_token->data,hold_token->data_size);
- if(index!=NULL)
- {
- expand_macro_argument(argument_list+*index,src,translation_data);
- }else
- {
- hold_token=copy_token(hold_token);
-
- hold_token->line=src->which_row;
- hold_token->column=src->which_column;
-
- wonky_assert(is_valid_token(hold_token));
-
- Queue_Push(translation_data->tokens,hold_token);
- }
- }
- delete_define_argument_list(hold->number_of_arguments,argument_list);
- }else
- {
- /*this isn't a macro, so we just push it to the token queue*/
- wonky_assert(is_valid_token(macro_name));
- Queue_Push(translation_data->tokens,macro_name);
- }
- }
- void preproc_lex_first_part(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct Source_File temp_src;
- struct token *hold_token;
- char just_in_case;
-
- temp_src=*src;
- hold_token=preproc_find_else(src,translation_data,1);
-
-
- temp_src.src_size=src->where_in_src;
- just_in_case=src->src[src->where_in_src];
- src->src[src->where_in_src]='\0';
-
- lex(&temp_src,translation_data);
-
- src->src[src->where_in_src]=just_in_case;
-
- if(hold_token!=NULL)
- wonky_free(hold_token);
- do
- {
- hold_token=preproc_find_else(src,translation_data,0);
- if(hold_token)
- wonky_free(hold_token);
- else
- break;
- }while(!has_new_errors(translation_data));
-
- if(hold_token!=NULL)
- {
- wonky_free(hold_token);
- push_lexing_error("could not find matching #else, #elif or #endif",src,translation_data);
- }
- }
- /*
- we have skipped the #if part so this could be used for elif
- */
- void parse_preproc_if_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
-
- struct Queue *tokens;
- struct Queue *swap;
- struct AST *condition;
- struct Scope *null_scope;
- struct token *hold_token;
- int result;
-
- null_scope=get_normal_scope(NULL,FILE_SCOPE);
-
- tokens=lex_line(src,translation_data,1);
-
- swap=translation_data->tokens;
- translation_data->tokens=tokens;
-
- condition=parse_expression(translation_data,null_scope);
- result=evaluate_const_expression_integer(condition,translation_data);
- delete_normal_scope((struct Normal_Scope*)null_scope);
- delete_ast(condition);
-
- if(result)
- {
- preproc_lex_first_part(src,translation_data);
- }else
- {
- hold_token=preproc_find_else(src,translation_data,0);
- if(hold_token!=NULL && hold_token->type==PKW_ELIF)
- {
- parse_preproc_if_line(src,translation_data);
- }
- else if(hold_token!=NULL)
- {
- preproc_lex_first_part(src,translation_data);
- }
- }
-
-
- }
- struct token* preproc_find_else(struct Source_File *src,struct Translation_Data *translation_data,char jump_before)
- {
- struct token *hold_token;
- struct Source_File temp_src;
- int indentation=1;
-
- temp_src=*src;
- while(src->src[src->where_in_src]!='\0' && indentation)
- {
- /*BEWARE*/
- temp_src=*src;
- /*END BEWARE*/
-
- hold_token=get_next_token(src,&chonky[0],1);
- if(hold_token->type==KW_HASHTAG)
- {
- wonky_free(hold_token);
- /*TODO FIX*/
- /*hold_token=get_next_token(src,&chonky_jr[0],0);*/
- hold_token=get_next_token(src,&chonky[0],0);
- switch(hold_token->type)
- {
- case PKW_IF:
- case PKW_IFDEF:
- case PKW_IFNDEF:
- ++indentation;
- break;
-
- case PKW_ENDIF:
- --indentation;
- break;
-
- case PKW_ELSE:
- case PKW_ELIF:
- if(indentation==1)
- {
- if(jump_before)
- *src=temp_src;
- return hold_token;
- }
- else
- {
- break;
- }
- case PKW_NOTYPE:
- wonky_free(hold_token);
- goto_new_line(src,translation_data);
- return NULL;
- }
- wonky_free(hold_token);
-
- }else if(hold_token->type!=KW_NOTYPE)
- {
- wonky_free(hold_token);
- }else
- {
- if(src->where_in_src!=src->src_size)
- push_lexing_error("unexpected character",src,translation_data);
- wonky_free(hold_token);
- return NULL;
- }
- goto_new_line(src,translation_data);
- }
- /*BEWARE*/
- //goto_new_line(src,translation_data);
- /*END BEWARE*/
- if(jump_before)
- *src=temp_src;
- return NULL;
- }
- void parse_preproc_ifdef_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct token *hold_token;
- hold_token=get_next_token(src,&chonky[0],0);
- if(hold_token==NULL || hold_token->type!=KW_ID)
- {
- wonky_free(hold_token);
- push_lexing_error("expected an id here",src,translation_data);
- chase_new_line(src,translation_data);
- return;
- }else
- {
- if(Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
- {
- preproc_lex_first_part(src,translation_data);
- }else
- {
- wonky_free(hold_token);
-
- hold_token=preproc_find_else(src,translation_data,1);
-
- if(hold_token!=NULL && hold_token->type==PKW_ELIF)
- {
- parse_preproc_if_line(src,translation_data);
- }else if(hold_token!=NULL)
- {
- preproc_find_else(src,translation_data,0);
- preproc_lex_first_part(src,translation_data);
- }
-
- wonky_free(hold_token);
- }
-
- }
- chase_new_line(src,translation_data);
- }
- void parse_preproc_ifndef_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct token *hold_token;
- hold_token=get_next_token(src,&chonky[0],0);
- if(hold_token==NULL || hold_token->type!=KW_ID)
- {
- push_lexing_error("expected an id here",src,translation_data);
- chase_new_line(src,translation_data);
- wonky_free(hold_token);
- return;
- }else
- {
- if(!Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
- {
- wonky_free(hold_token);
- preproc_lex_first_part(src,translation_data);
- }else
- {
- wonky_free(hold_token);
-
- hold_token=preproc_find_else(src,translation_data,1);
- if(hold_token!=NULL && hold_token->type==PKW_ELIF)
- {
- parse_preproc_if_line(src,translation_data);
- }
- else if(hold_token!=NULL)
- {
- preproc_find_else(src,translation_data,0);
- preproc_lex_first_part(src,translation_data);
- }
- wonky_free(hold_token);
- }
-
- }
- chase_new_line(src,translation_data);
- }
- void parse_preproc_undef_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct define_directive *hold_macro;
- struct token *id;
-
- id=get_next_token(src,&chonky[0],0);
- if(id->type!=KW_ID)
- {
- push_lexing_error("expected an id here",src,translation_data);
- }else
- {
- hold_macro=Map_Check(translation_data->macros,id->data,id->data_size);
- if(hold_macro!=NULL)
- {
- delete_macro(hold_macro);
- Map_Remove(translation_data->macros,id->data,id->data_size);
- }
- }
- wonky_free(id);
- chase_new_line(src,translation_data);
- }
- void parse_preproc_error_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- char *error;
- size_t line,column;
- error=src->src+src->where_in_src;
- line=src->which_row+1;
- column=src->which_column+1;
-
-
- goto_new_line(src,translation_data);
- src->src[src->where_in_src-1]='\0';
-
- push_raw_translation_error(error,line,column,src->src_name->filename,translation_data);
-
- }
- void parse_preproc_line_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- struct Queue *tokens;
- struct Translation_Data hack;
- struct token *hold_line;
- struct token *hold_name;
- struct AST *line_expression;
-
- tokens=lex_line(src,translation_data,0);
- hack=*translation_data;
- hack.tokens=tokens;
- /*TODO account for other types of integer constants*/
- if(check(&hack,KW_DECIMAL_CONSTANT,0))
- {
- hold_line=(struct token*)Queue_Pop(tokens);
- line_expression=(struct AST*)get_constant_tree(get_expression_value_constant(extract_constant(hold_line,translation_data)));
-
- src->which_row=evaluate_const_expression_integer(line_expression,translation_data);
- if(check(&hack,KW_STRING,0))
- {
- hold_name=(struct token*)Queue_Pop(tokens);
- hold_name->data[hold_name->data_size]='\0';
- if(tokens->size>0)
- {
- wonky_free(hold_line);
- wonky_free(hold_name);
- flush_tokens(tokens);
- push_lexing_error("expected a new line in #line preprocessing directive here",src,translation_data);
- return;
- }else
- {
- delete_source_name(src->src_name);
- src->src_name=get_source_name(hold_name->data,"");
- return;
- }
-
- }else if(tokens->size>0)
- {
- wonky_free(hold_line);
- flush_tokens(tokens);
- push_lexing_error("expected a string or new line in #line preprocessing directive here",src,translation_data);
- return;
- }
-
- }else
- {
- flush_tokens(tokens);
- push_lexing_error("expected a line number in #line preprocessing directive here",src,translation_data);
- return;
- }
- }
- void delete_macro(void *macro)
- {
- /*
- #define AS_MACRO(x) ((struct define_directive*)macro)
- wonky_free(AS_MACRO(macro)->macro_name);
- flush_tokens(AS_MACRO(macro)->macro_tokens);
- wonky_free(AS_MACRO(macro)->macro_tokens);
- Map_Map(AS_MACRO(macro)->arguments,wonky_free);
- wonky_free(AS_MACRO(macro)->arguments);
- wonky_free(macro);
- #undef AS_MACRO
- */
- }
- struct Queue* lex_line(struct Source_File *src,struct Translation_Data *translation_data,char lex_defined_token)
- {
-
- struct Source_File temp_src;
- struct token *hold_token;
- struct Queue *tokens;
- char just_in_case;
-
- tokens=wonky_malloc(sizeof(struct Queue));
- Queue_Init(tokens);
-
-
- temp_src=*src;
- goto_new_line(src,translation_data);
- just_in_case=src->src[src->where_in_src];
- src->src[src->where_in_src]='\0';
-
- translation_data->tokens=tokens;
-
- while((hold_token=get_next_token(&temp_src,&chonky[0],0))->type!=KW_NOTYPE)
- {
- if(lex_defined_token && hold_token->type==KW_ID && hold_token->data_size==7 && gstrn_cmp(hold_token->data,"defined",7))
- {
- wonky_free(hold_token);
- hold_token=get_next_token(&temp_src,&chonky[0],0);
- if(hold_token->type==KW_OPEN_NORMAL)
- {
- wonky_free(hold_token);
- hold_token=get_next_token(&temp_src,&chonky[0],0);
- if(hold_token->type!=KW_ID)
- {
- push_lexing_error("expected an id after '(' in defined",src,translation_data);
- }else
- {
- struct token *hold_closing_token;
- hold_closing_token=get_next_token(&temp_src,&chonky[0],0);
- if(hold_closing_token->type!=KW_CLOSE_NORMAL)
- {
- push_lexing_error("expected an ')' after id in define",src,translation_data);
- }else
- {
- if(!Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
- {
- hold_token->type=KW_DECIMAL_CONSTANT;
- hold_token->data="0";
- hold_token->data_size=1;
- }else
- {
- hold_token->type=KW_DECIMAL_CONSTANT;
- hold_token->data="1";
- hold_token->data_size=1;
- }
-
- }
- }
- }else if(hold_token->type!=KW_ID)
- {
- push_lexing_error("expected an id after define",src,translation_data);
- }else
- {
- if(!Map_Check(translation_data->macros,hold_token->data,hold_token->data_size))
- {
- hold_token->type=KW_DECIMAL_CONSTANT;
- hold_token->data="0";
- hold_token->data_size=1;
- }else
- {
- hold_token->type=KW_DECIMAL_CONSTANT;
- hold_token->data="1";
- hold_token->data_size=1;
- }
- }
- }
- Queue_Push(tokens,hold_token);
- }
-
- wonky_free(hold_token);
- src->src[src->where_in_src]=just_in_case;
-
- return tokens;
- }
- #endif
F diff --git a/src/frontend/lex/preprocessing.h b/src/frontend/lex/preprocessing.h deleted file mode 100644 --- a/src/frontend/lex/preprocessing.h +++ /dev/null
- #ifndef WONKY_PREPROCESSING_H
- #define WONKY_PREPROCESSING_H WONKY_PREPROCESSING_H
- #include <preprocessing.hh>
- #include <program.h>
- #include <lexer.h>
- #include <automata.h>
- #include <gcc_error.h>
- #include <map.h>
- #include <scope.h>
- #include <common.h>
-
- struct define_directive
- {
- struct token *macro_name;
-
- /*the tokens of the macro*/
- struct Queue *macro_tokens;
-
- /*ints are stored here*/
- struct Map *arguments;
-
- /*put arguments here*/
- /*an array of token queues*/
- size_t number_of_arguments;
- };
-
- void parse_preproc_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_include_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_preproc_if_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_preproc_ifdef_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_preproc_ifndef_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_preproc_undef_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_preproc_error_line(struct Source_File *src,struct Translation_Data *translation_data);
- void parse_preproc_line_line(struct Source_File *src,struct Translation_Data *translation_data);
-
-
- struct Queue* lex_line(struct Source_File *src,struct Translation_Data *translation_data,char lex_defined_token);
- /*preproc if stuff*/
- /*returns an else or elif token, or if it hits matching endif before that return NULL*/
- struct token* preproc_find_else(struct Source_File *src,struct Translation_Data *translation_data,char jump_before);
-
- /*hack*/
- void preproc_lex_first_part(struct Source_File *src,struct Translation_Data *translation_data);
-
-
- /*define stuff*/
- void expand_macro(struct token* macro_name,struct Source_File *src,struct Translation_Data *translation_data);
-
-
-
- struct define_directive* get_define_directive(struct token* macro_name);
- struct Queue* make_define_argument_list(size_t number_of_arguments);
-
- void expand_macro_argument(struct Queue *replacement_tokens,struct Source_File *src,struct Translation_Data *translation_data);
- void load_macro_arguments(struct Queue *args,size_t number_of_arguments,struct Source_File *src,struct Translation_Data *translation_data);
- void flush_macro_arguments(size_t number_of_arguments,struct Queue *args);
- void parse_define_line(struct Source_File *src,struct Translation_Data *translation_data);
-
- void delete_define_argument_list(size_t number_of_arguments,struct Queue *args);
- void delete_macro(void *macro);
-
- #endif
F diff --git a/src/frontend/lex/preprocessing.hh b/src/frontend/lex/preprocessing.hh deleted file mode 100644 --- a/src/frontend/lex/preprocessing.hh +++ /dev/null
- #ifndef WONKY_PREPROCESSING_H
- #define WONKY_PREPROCESSING_H WONKY_PREPROCESSING_H
-
- struct define_directive;
-
- #endif
F diff --git a/src/semantics/memory/memory_location.h b/src/semantics/memory/memory_location.h --- a/src/semantics/memory/memory_location.h +++ b/src/semantics/memory/memory_location.h
#ifndef WONKY_LOCATION_H
#define WONKY_LOCATION_H WONKY_LOCATION_H
- #include <location.hh>
+ #include <memory_location.hh>
#include <lexer.h>
#include <type.h>
#include <common.h>
F diff --git a/src/semantics/program/source_file.c b/src/semantics/program/source_file.c deleted file mode 100644 --- a/src/semantics/program/source_file.c +++ /dev/null
- #ifndef WONKY_SOURCE_FILE_C
- #define WONKY_SOURCE_FILE_C WONKY_SOURCE_FILE_C
- #include <source_file.h>
-
- char *well_known_locations_base[]={"","/usr/include/","/usr/include/x86_64-linux-gnu/",NULL};
- struct Source_File* extract_source_file(FILE *in,struct Source_Name *name)
- {
- long file_size;
- struct Source_File *src;
-
-
- if(fseek(in,0,SEEK_END)==-1)
- return NULL;
- if((file_size=ftell(in))==-1)
- return NULL;
- if(fseek(in,0,SEEK_SET)==-1)
- return NULL;
-
- src=wonky_malloc(sizeof(struct Source_File));
-
- src->src_name=name;
-
- src->src=wonky_malloc(file_size+1);
- src->src_size=file_size;
-
- src->where_in_src=0;
-
- src->which_column=0;
- src->which_row=0;
- src->is_in_the_begining_of_line=1;
-
- src->src[file_size]='\0';
-
- fread(src->src,1,file_size,in);
- fclose(in);
- return src;
- }
- /*this might cause compatability issues TODO*/
- void normalise_source_name(struct Source_Name *name)
- {
- size_t offset;
- size_t i;
- size_t last_slash;
- char *hold_base;
-
- for(last_slash=offset=0;name->filename[offset];++offset)
- {
- if(name->filename[offset]=='/')
- {
- last_slash=offset;
- }
- }
-
- if(last_slash==0)
- return;
-
- if(name->base==NULL)
- {
- offset=0;
- name->base=wonky_malloc(last_slash+1);
- name->base[last_slash]='\0';
- name->base[last_slash-1]='/';
-
- }else
- {
- offset=gstrlen((char*)name->base);
- hold_base=wonky_malloc(offset+last_slash+2);
- strmv(hold_base,(char*)name->base);
-
- hold_base[last_slash+offset]='/';
- hold_base[last_slash+offset+1]='\0';
- wonky_free((void*)name->base);
-
- name->base=hold_base;
- }
-
- for(i=0;i<last_slash;++i)
- name->base[offset+i]=name->filename[i];
-
-
- ++i;
- /*prune the filename*/
- offset=gstrlen(name->filename+i);
- hold_base=wonky_malloc(offset+1);
- strmv(hold_base,name->filename+i);
- wonky_free(name->filename);
- name->filename=hold_base;
-
-
- }
- /*here be dragons*/
- char src_getc(struct Source_File *src,char skip_line_splice,char skip_comments,char skip_new_line)
- {
- superhack:
- if(src->src[src->where_in_src]=='\\' && skip_line_splice)
- {
- if(src->where_in_src < src->src_size-1 && src->src[src->where_in_src+1]=='\n')
- {
- src->where_in_src+=2;
- ++src->which_row;
- src->token_size+=2;
- src->which_column=0;
- goto superhack;
- }else
- {
- ++src->token_size;
- ++src->which_column;
- ++src->where_in_src;
- src->is_in_the_begining_of_line=0;
- return '\\';
- }
- }else
- {
- if(src->src[src->where_in_src]=='\n' && skip_new_line)
- {
- ++src->which_row;
- src->which_column=0;
- src->is_in_the_begining_of_line=1;
-
- ++src->where_in_src;
- goto superhack;
- }else if(src->src[src->where_in_src]=='/' && skip_comments)
- {
- if(src->src[src->where_in_src+1]=='*')
- {
- char hold_char;
-
-
- src->where_in_src+=2;
- hold_char=src_getc(src,1,0,1);
- while(hold_char)
- {
- if(hold_char=='*')
- {
- hold_char=src_getc(src,1,0,1);
- if(hold_char=='\0')
- {
- src->where_in_src=src->src_size;
- return '\0';
- }
- else if(hold_char=='/')
- {
- goto superhack;
- }
- }else
- {
- hold_char=src_getc(src,1,0,1);
- }
- }
- src->where_in_src=src->src_size;
- return '\0';
-
- }
- }else
- {
- ++src->which_column;
- }
- ++src->token_size;
- if(src->src[src->where_in_src]!='#' || src->is_in_the_begining_of_line!=1)
- src->is_in_the_begining_of_line=0;
- if(src->src[src->where_in_src]=='\n')
- {
- return '\n';
- }
-
- if(src->src[src->where_in_src]=='\0')
- return src->src[src->where_in_src];
- else
- return src->src[src->where_in_src++];
- }
- }
- void src_ungetc(struct Source_File *src)
- {
- --src->where_in_src;
- if(src->src[src->where_in_src]=='\n')
- {
- --src->which_row;
- src->which_column=0;
- }
- }
- struct token* src_extract_token(struct Source_File *src,enum LEXER_TYPE kw)
- {
- struct token *ret;
- ret=wonky_malloc(sizeof(struct token));
- ret->type=kw;
-
- ret->data_size=src->best_token_size;
- ret->column=src->best_token_column;
- ret->line=src->best_token_line;
- ret->data=src->src+src->best_token_where_in_src_start;
- ret->filename=src->src_name->filename;
- handle_splicing(ret);
- src->where_in_src=src->best_token_where_in_src_end;
- src->is_in_the_begining_of_line=src->best_token_beg_line;
- return ret;
- }
-
- void src_reset_token_data(struct Source_File *src,char use_src_as_base)
- {
- src->token_size=0;
- src->best_token_size=0;
- src->best_token_line=src->which_row;
- src->best_token_column=src->which_column;
- if(use_src_as_base)
- {
- src->best_token_where_in_src_end=src->where_in_src;
- }else
- {
- src->where_in_src=src->best_token_where_in_src_end;
- }
- src->best_token_where_in_src_start=src->where_in_src;
- }
- void src_assimilate_into_best_token(struct Source_File *src)
- {
- src->best_token_size=src->token_size;
- src->best_token_line=src->which_row;
- src->best_token_column=src->which_column;
- src->best_token_where_in_src_end=src->where_in_src;
- src->best_token_beg_line=src->is_in_the_begining_of_line;
- }
- void delete_source_file(struct Source_File *src)
- {
- delete_source_name(src->src_name);
- wonky_free(src->src);
- wonky_free(src);
- }
- void delete_source_name(struct Source_Name *name)
- {
- wonky_free(name->filename);
- wonky_free(name->base);
- wonky_free(name);
- }
- void goto_new_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- char hold_char;
- while( (hold_char=src_getc(src,1,1,0)) != '\0' && hold_char != '\n');
- src->is_in_the_begining_of_line=1;
- ++src->where_in_src;
- ++src->which_row;
- src->which_column=0;
- }
- void chase_new_line(struct Source_File *src,struct Translation_Data *translation_data)
- {
- char hold_char;
- for(hold_char=src_getc(src,1,1,0);hold_char!='\n' && hold_char!='\0';
- hold_char=src_getc(src,1,1,0));
-
- ++src->where_in_src;
- ++src->which_row;
- src->is_in_the_begining_of_line=1;
- src->which_column=0;
-
-
- }
- /*returns the number of bytes skipped*/
- size_t skip_line_splice(struct Source_File *src)
- {
- size_t current_size=0;
- while(src->where_in_src<src->src_size-1 && src->src[src->where_in_src]=='\\' && src->src[src->where_in_src+1]=='\n')
- {
- src->where_in_src+=2;
- current_size+=2;
- }
- return current_size;
- }
-
- void skip_white_space(struct Source_File *src,char skip_new_line)
- {
- char hold_char;
- while(hold_char=src_getc(src,1,1,skip_new_line))
- {
- if(hold_char=='\n' && !skip_new_line)
- {
- return ;
- }
- if(hold_char!=' ' && hold_char!='\t')
- {
- src_ungetc(src);
- return ;
- }
- }
- }
- /*where_to_search ends in a NULL pointer*/
- struct Source_File* get_source_file(char *filename,char **where_to_search)
- {
- FILE *in;
- char *temp_name;
- char is_directory=0;
- struct Source_Name *name;
- struct Source_File *file;
-
- wonky_assert(where_to_search!=NULL);
- wonky_assert(*where_to_search!=NULL);
- do
- {
- temp_name=gstr_append(*where_to_search,filename);
- in=fopen(temp_name,"r");
- wonky_free(temp_name);
- if(in==NULL)
- continue;
-
- name=get_source_name(filename,*where_to_search);
- file=extract_source_file(in,name);
- if(file!=NULL)
- {
- return file;
- }else
- {
- delete_source_name(name);
- }
- }while(*(++where_to_search));
- return NULL;
- }
- #endif
F diff --git a/src/semantics/program/source_file.h b/src/semantics/program/source_file.h deleted file mode 100644 --- a/src/semantics/program/source_file.h +++ /dev/null
- #ifndef WONKY_SOURCE_FILE_H
- #define WONKY_SOURCE_FILE_H WONKY_SOURCE_FILE_H
- #include <source_file.hh>
- extern char *well_known_locations_base[];
- struct Source_Name
- {
- char *filename;
- char *base;
- };
- struct Source_Location
- {
- size_t line;
- size_t column;
- };
- struct Source_File
- {
- enum Source_Text_Type type;
- char *src;
- size_t src_size;
-
- struct Source_Name *src_name;
- };
- struct Source_Section
- {
- enum Source_Text_Type type;
- char *src;
- size_t src_size
-
- struct Source_Name *src_name;
- struct Source_Location *where_in_source;
- };
-
- struct Source_File* extract_source_file(FILE *in,struct Source_Name *name);
- struct Source_File* get_source_file(char *filename,char **where_to_search);
- struct Source_Name* get_source_name(char *filename,char *base);
- void normalise_source_name(struct Source_Name *name);
- char src_getc(struct Source_File *src,char skip_line_splice,char skip_comments,char skip_new_line);
- void src_ungetc(struct Source_File *src);
- struct token* src_extract_token(struct Source_File *src,enum LEXER_TYPE kw);
- void src_reset_token_data(struct Source_File *src,char use_src_as_base);
- void src_assimilate_into_best_token(struct Source_File *src);
- void chase_new_line(struct Source_File *src,struct Translation_Data *translation_data);
- void goto_new_line(struct Source_File *src,struct Translation_Data *translation_data);
- void skip_white_space(struct Source_File *src,char skip_new_line);
- size_t skip_line_splice(struct Source_File *src);
- void delete_source_file(struct Source_File *src);
- void delete_source_name(struct Source_Name *name);
- #endif
F diff --git a/src/semantics/program/source_file.hh b/src/semantics/program/source_file.hh deleted file mode 100644 --- a/src/semantics/program/source_file.hh +++ /dev/null
- #ifndef WONKY_SOURCE_FILE_HH
- #define WONKY_SOURCE_FILE_HH WONKY_SOURCE_FILE_HH
-
- struct Source_Name;
- struct Source_Location;
- struct Source_File;
- struct Source_Section;
- enum Source_Text_Type;
-
- #endif
F diff --git a/src/semantics/program/token.c b/src/semantics/program/token.c deleted file mode 100644 --- a/src/semantics/program/token.c +++ /dev/null
- #ifndef WONKY_TOKEN_C
- #define WONKY_TOKEN_C WONKY_TOKEN_C
- #include <token.h>
-
- struct token* copy_token(struct token *src)
- {
- struct token *cpy;
- cpy=wonky_malloc(sizeof(struct token));
- *cpy=*src;
- return cpy;
- }
-
- char compare_tokens(struct token *a,struct token *b)
- {
- size_t i;
- if(a->data_size!=b->data_size)
- return 0;
- for(i=0;i<a->data_size;++i)
- {
- if(a->data[i]!=b->data[i])
- return 0;
- }
- return 1;
- }
-
- struct identifier* get_identifier(char *data,size_t size)
- {
- struct identifier *ret;
- ret=wonky_malloc(sizeof(struct identifier));
- ret->size=size;
- ret->data=data;
- ret->last_defined_macro_with_this_id=NULL;
- ret->last_use_as_a_macro_argument=NULL;
-
- return ret;
- }
- struct token_identifier* get_id_token(struct identifier *id,struct Source_Location *location)
- {
- struct token_identifier *ret;
- ret=wonky_malloc(sizeof(struct token_identifier));
- ret->type=KW_ID;
- ret->location=location;
- ret->id=id;
-
-
- return ret;
- }
- struct token_keyword* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *location)
- {
- struct token_keyword *ret;
- ret=wonky_malloc(sizeof(struct token_keyword));
- ret->type=type;
- ret->location=location;
-
- return ret;
- }
- struct token_punctuator* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *location)
- {
- struct token_punctuator *ret;
- ret=wonky_malloc(sizeof(struct token_punctuator));
- ret->type=type;
- ret->location=location;
- ret->punctuator_type=PUNCTUATOR_NORMAL;/*TODO*/
-
- return ret;
- }
- struct token_constant* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size)
- {
- struct token_constant *ret;
- ret=wonky_malloc(sizeof(struct token_constant));
- ret->location=location;
- ret->type=bare_type;/*TODO*/
-
- switch(bare_type)
- {
-
- case KW_HEXADECIMAL_CONSTANT:
- function(KW_HEXADECIMAL_CONSTANT);
- break;
- case KW_DECIMAL_CONSTANT:
- function(KW_DECIMAL_CONSTANT);
- break;
- case KW_OCTAL_CONSTANT:
- function(KW_OCTAL_CONSTANT);
- break;
- case KW_UNSIGNED_DECIMAL_CONSTANT:
- function(KW_UNSIGNED_DECIMAL_CONSTANT);
- break;
- case KW_UNSIGNED_OCTAL_CONSTANT:
- function(KW_UNSIGNED_OCTAL_CONSTANT);
- break;
- case KW_UNSIGNED_HEXADECIMAL_CONSTANT:
- function(KW_UNSIGNED_HEXADECIMAL_CONSTANT);
- break;
- case KW_UNSIGNED_LONG_HEXADECIMAL_CONSTANT:
- function(KW_UNSIGNED_LONG_HEXADECIMAL_CONSTANT);
- break;
- case KW_UNSIGNED_LONG_OCTAL_CONSTANT:
- function(KW_UNSIGNED_LONG_OCTAL_CONSTANT);
- break;
- case KW_UNSIGNED_LONG_DECIMAL_CONSTANT:
- function(KW_UNSIGNED_LONG_DECIMAL_CONSTANT);
- break;
- case KW_UNSIGNED_LONG_LONG_DECIMAL_CONSTANT:
- function(KW_UNSIGNED_LONG_LONG_DECIMAL_CONSTANT);
- break;
- case KW_UNSIGNED_LONG_LONG_HEXADECIMAL_CONSTANT:
- function(KW_UNSIGNED_LONG_LONG_HEXADECIMAL_CONSTANT);
- break;
- case KW_UNSIGNED_LONG_LONG_OCTAL_CONSTANT:
- function(KW_UNSIGNED_LONG_LONG_OCTAL_CONSTANT);
- break;
- case KW_LONG_HEXADECIMAL_CONSTANT:
- function(KW_LONG_HEXADECIMAL_CONSTANT);
- break;
- case KW_LONG_OCTAL_CONSTANT:
- function(KW_LONG_OCTAL_CONSTANT);
- break;
- case KW_LONG_DECIMAL_CONSTANT:
- function(KW_LONG_DECIMAL_CONSTANT);
- break;
- case KW_LONG_LONG_HEXADECIMAL_CONSTANT:
- function(KW_LONG_LONG_HEXADECIMAL_CONSTANT);
- break;
- case KW_LONG_LONG_OCTAL_CONSTANT:
- function(KW_LONG_LONG_OCTAL_CONSTANT);
- break;
- case KW_LONG_LONG_DECIMAL_CONSTANT:
- function(KW_LONG_LONG_DECIMAL_CONSTANT);
- break;
- case KW_DOUBLE_DECIMAL_CONSTANT:
- function(KW_DOUBLE_DECIMAL_CONSTANT);
- break;
- case KW_LONG_DOUBLE_DECIMAL_CONSTANT:
- function(KW_LONG_DOUBLE_DECIMAL_CONSTANT);
- break;
- case KW_FLOAT_DECIMAL_CONSTANT:
- function(KW_FLOAT_DECIMAL_CONSTANT);
- break;
- case KW_DOUBLE_HEXADECIMAL_CONSTANT:
- function(KW_DOUBLE_HEXADECIMAL_CONSTANT);
- break;
- case KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT:
- function(KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT);
- break;
- case KW_FLOAT_HEXADECIMAL_CONSTANT:
- function(KW_FLOAT_HEXADECIMAL_CONSTANT);
- break;
- case KW_CHAR_CONSTANT:
- function(KW_CHAR_CONSTANT);
- break;
- case KW_WIDE_CHAR_CONSTANT:
- function(KW_WIDE_CHAR_CONSTANT);
- break;
- default:
- wonky_assert(SHOULD_NOT_REACH_HERE);
- }
-
- return ret;
- }
- struct token_string* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size)
- {
- struct token_string *ret;
- ret=wonky_malloc(sizeof(struct token_string));
- ret->type=bare_type;
- ret->location=location;
- ret->size=size;
- ret->data=data;
-
-
- return ret;
- }
- struct token_include_directive* get_include_directive_token(struct Source_Location *location,struct Queue *tokens)
- {
- struct token_include_directive *ret;
- ret=wonky_malloc(sizeof(struct token_string));
- ret->location=location;
- ret->tokens=tokens;
- return ret;
- }
- struct token_if_directive* get_if_directive_token(struct Source_Location *location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive)
- {
- struct token_if_directive *ret;
- ret=wonky_malloc(sizeof(struct token_if_directive));
- ret->
-
- }
-
- struct token_ifdef_directive* get_ifdef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
-
- struct token_ifdef_directive* get_ifdef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
-
- struct token_normal_define_directive* get_normal_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *replacement_tokens);
-
- struct token_functionlike_define_directive* get_functionlike_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *argument_id_list_tokens,struct Queue *replacement_tokens);
-
- struct token_undef_directive* get_undef_directive_token(struct Source_Location *location,struct identifier *id);
- struct token_line_directive* get_line_directive_token(struct Source_Location *location,struct Source_Location *new_location);
- struct token_error_directive* get_error_directive_token(struct Source_Location *location,struct token_string *error_message);
- struct token_pragma_directive* get_pragma_directive(struct Source_Location *location,enum Pragma_Type type);
- struct token_defined_unary_operator* get_defined_unary_operator(struct Source_Location *location,struct identifier *id);
- #endif
F diff --git a/src/semantics/program/token.h b/src/semantics/program/token.h deleted file mode 100644 --- a/src/semantics/program/token.h +++ /dev/null
- #ifndef WONKY_TOKEN_H
- #define WONKY_TOKEN_H WONKY_TOKEN_H
- #include <token.hh>
-
- #include <automata.h>
- #include <constant.h>
- #include <wonky_malloc.h>
- #include <wonky_assert.h>
- #include <source_file.h>
-
- struct token
- {
- enum LEXER_TYPE type;
- };
- struct identifier /*there is only one of these per id*/
- {
- size_t size;
- char *data;
- struct token *last_defined_macro_with_this_id;
- struct functionlike_define_directive_argument *last_use_as_a_macro_argument;
- };
- struct token_identifier
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct identifier *id;
- };
- struct token_keyword
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- };
- struct token_punctuator
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- enum Punctuator_Token_Type punctuator_type;
- };
- struct token_constant
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct Constant *constant;
- };
- struct token_string
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- enum String_Token_Type string_type;
- size_t size;
- char *data;
- };
- struct token_include_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct Queue *tokens;
- };
- struct token_if_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct Queue *controlling_expression;
- struct Queue_Node *if_true;
- struct Queue_Node *if_false;
- struct Queue_Node *end_of_if_directive;
- };
- struct token_ifdef_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct token_identifier *id;
- struct Queue_Node *if_defined;
- struct Queue_Node *if_undefined;
- struct Queue_Node *end_of_ifdef_directive;
- };
- struct token_ifndef_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct token_identifier *id;
- struct Queue_Node *if_undefined;
- struct Queue_Node *if_defined;
- struct Queue_Node *end_of_ifndef_directive;
-
- };
- struct token_normal_define_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct token_identifier *id;
- struct Queue *replacement_tokens;
- struct Translation_Unit *the_last_place_this_macro_was_defined;
- };
- struct functionlike_define_directive_argument
- {
- struct token_functionlike_define_directive *belongs_to;
- struct token *first_in_argument_substitution_tokens;
- size_t number_of_substitution_tokens;
- };
- struct token_functionlike_define_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct identifier *id;
- struct Queue *arguments;
- struct Queue *replacement_tokens;
- struct Translation_Unit *the_last_place_this_macro_was_defined;
- };
- struct token_undef_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct identifier *id;
- };
- struct token_line_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *real_location;
- struct Source_Location *new_location;
- };
- struct token_error_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct token_string *error_message;
- };
- struct token_pragma_directive
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- enum Pragma_Type pragma_type;
- };
- struct token_defined_unary_operator
- {
- enum LEXER_TYPE type;
- struct Source_Location *location;
- struct identifier *id;
- };
- struct token_unlexed_source_part
- {
- enum LEXER_TYPE type;
- struct Source_Section *section;
- };
-
- /*
- * OLD TOKEN STRUCT
- struct token
- {
- enum LEXER_TYPE type;
- size_t data_size;
- char *data;
- size_t line,column;
- const char *filename;
- };
- */
- struct token* copy_token(struct token *src);
- void handle_splicing(struct token *word);
- char compare_tokens(struct token *a,struct token *b);
-
-
- struct identifier* get_identifier(char *data,size_t size);
- struct token_identifier* get_id_token(struct identifier *id,struct Source_Location *location);
- struct token_keyword* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *location);
- struct token_punctuator* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *location);
- struct token_constant* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size);
- struct token_string* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size);
- struct token_include_directive* get_include_directive_token(struct Source_Location *location,struct Queue *tokens);
- struct token_if_directive* get_if_directive_token(struct Source_Location *location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
-
- struct token_ifdef_directive* get_ifdef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
-
- struct token_ifdef_directive* get_ifdef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
-
- struct token_normal_define_directive* get_normal_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *replacement_tokens);
-
- struct token_functionlike_define_directive* get_functionlike_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *argument_id_list_tokens,struct Queue *replacement_tokens);
-
- struct token_undef_directive* get_undef_directive_token(struct Source_Location *location,struct identifier *id);
- struct token_line_directive* get_line_directive_token(struct Source_Location *location,struct Source_Location *new_location);
- struct token_error_directive* get_error_directive_token(struct Source_Location *location,struct token_string *error_message);
- struct token_pragma_directive* get_pragma_directive(struct Source_Location *location,enum Pragma_Type type);
- struct token_defined_unary_operator* get_defined_unary_operator(struct Source_Location *location,struct identifier *id);
-
- #endif
F diff --git a/src/semantics/program/token.hh b/src/semantics/program/token.hh deleted file mode 100644 --- a/src/semantics/program/token.hh +++ /dev/null
- #ifndef WONKY_TOKEN_HH
- #define WONKY_TOKEN_HH WONKY_TOKEN_HH
-
- struct token;
- struct identifier;
- struct token_identifier;
- struct token_keyword;
- struct token_punctuator;
- struct token_constant;
- struct token_string;
- struct token_include_directive;
- struct token_if_directive;
- struct token_ifdef_directive;
- struct token_ifndef_directive;
- struct token_normal_define_directive;
- struct functionlike_define_directive_argument;
- struct token_functionlike_define_directive;
- struct token_undef_directive;
- struct token_line_directive;
- struct token_error_directive;
- struct token_pragma_directive;
- struct token_defined_unary_operator;
-
- enum Punctuator_Token_Type
- {
- PUNCTUATOR_NORMAL,
- PUNCTUATOR_DIGRAPH,
- PUNCTUATOR_TRIGRAPH,
- PUNCTUATOR_TYPE_END
- };
- enum Constant_Token_Encoding
- {
- CONSTANT_TOKEN_HEXADECIMAL,
- CONSTANT_TOKEN_DECIMAL,
- CONSTANT_TOKEN_OCTAL,
- CONSTANT_TOKEN_TYPE_END
- };
- enum Constant_Token_Specifier
- {
- CONSTANT_TOKEN_SPECIFIER_NONE,
- CONSTANT_TOKEN_SPECIFIER_LONG,
- CONSTANT_TOKEN_SPECIFIER_LONG_LONG,
- CONSTANT_TOKEN_SPECIFIER_END
-
- };
- enum Constant_Token_Signedness
- {
- CONSTANT_TOKEN_SIGNED,
- CONSTANT_TOKEN_UNSIGNED,
- CONSTANT_TOKEN_SIGNEDNESS_END
- };
- enum String_Token_Type
- {
- STRING_TOKEN_NORMAL,
- STRING_TOKEN_WIDE,
- STRING_TOKEN_TYPE_END
- };
- enum Pragma_Type
- {
- PRAGMA_TYPE_END
- };
-
- #endif
F diff --git a/src/syntax/automatas/automata.c b/src/syntax/automatas/automata.c new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/automata.c
+ #ifndef WONKY_AUTOMATA_C
+ #define WONKY_AUTOMATA_C WONKY_AUTOMATA_C
+ #include <automata.h>
+
+ enum Source_Chars compress[256]
+ =
+ {
+ ['a']=CHAR_a,
+ ['b']=CHAR_b,
+ ['c']=CHAR_c,
+ ['d']=CHAR_d,
+ ['e']=CHAR_e,
+ ['f']=CHAR_f,
+ ['g']=CHAR_g,
+ ['h']=CHAR_h,
+ ['i']=CHAR_i,
+ ['j']=CHAR_j,
+ ['k']=CHAR_k,
+ ['l']=CHAR_l,
+ ['m']=CHAR_m,
+ ['n']=CHAR_n,
+ ['o']=CHAR_o,
+ ['p']=CHAR_p,
+ ['q']=CHAR_q,
+ ['r']=CHAR_r,
+ ['s']=CHAR_s,
+ ['t']=CHAR_t,
+ ['u']=CHAR_u,
+ ['v']=CHAR_v,
+ ['w']=CHAR_w,
+ ['x']=CHAR_x,
+ ['y']=CHAR_y,
+ ['z']=CHAR_z,
+ ['A']=CHAR_A,
+ ['B']=CHAR_B,
+ ['C']=CHAR_C,
+ ['D']=CHAR_D,
+ ['E']=CHAR_E,
+ ['F']=CHAR_F,
+ ['G']=CHAR_G,
+ ['H']=CHAR_H,
+ ['I']=CHAR_I,
+ ['J']=CHAR_J,
+ ['K']=CHAR_K,
+ ['L']=CHAR_L,
+ ['M']=CHAR_M,
+ ['N']=CHAR_N,
+ ['O']=CHAR_O,
+ ['P']=CHAR_P,
+ ['Q']=CHAR_Q,
+ ['R']=CHAR_R,
+ ['S']=CHAR_S,
+ ['T']=CHAR_T,
+ ['U']=CHAR_U,
+ ['V']=CHAR_V,
+ ['W']=CHAR_W,
+ ['X']=CHAR_X,
+ ['Y']=CHAR_Y,
+ ['Z']=CHAR_Z,
+ ['0']=CHAR_0,
+ ['1']=CHAR_1,
+ ['2']=CHAR_2,
+ ['3']=CHAR_3,
+ ['4']=CHAR_4,
+ ['5']=CHAR_5,
+ ['6']=CHAR_6,
+ ['7']=CHAR_7,
+ ['8']=CHAR_8,
+ ['9']=CHAR_9,
+ ['!']=CHAR_EXCLAMATION,
+ ['"']=CHAR_DOUBLE_QUOTE,
+ ['#']=CHAR_HASHTAG,
+ ['%']=CHAR_PERCENT,
+ ['&']=CHAR_AMPERSANT,
+ ['\'']=CHAR_SINGLE_QUOTE,
+ ['(']=CHAR_OPEN_NORMAL,
+ [')']=CHAR_CLOSE_NORMAL,
+ ['*']=CHAR_STAR,
+ ['+']=CHAR_PLUS,
+ [',']=CHAR_COMMA,
+ ['-']=CHAR_MINUS,
+ ['.']=CHAR_DOT,
+ ['/']=CHAR_FORWARD_SLASH,
+ [':']=CHAR_COLUMN,
+ [';']=CHAR_SEMI_COLUMN,
+ ['<']=CHAR_LESS,
+ ['=']=CHAR_EQUAL,
+ ['>']=CHAR_GREATER,
+ ['?']=CHAR_QUESTION,
+ ['[']=CHAR_OPEN_SQUARE,
+ ['\\']=CHAR_BACKWARD_SLASH,
+ ['^']=CHAR_HAT,
+ ['_']=CHAR_UNDERSCORE,
+ ['{']=CHAR_OPEN_CURLY,
+ ['|']=CHAR_PIPE,
+ ['}']=CHAR_CLOSE_CURLY,
+ ['~']=CHAR_TILDE,
+ [' ']=CHAR_SPACE,
+ ['\t']=CHAR_HORISONTAL_TAB,
+ ['\v']=CHAR_VERTICAL_TAB,
+ ['\n']=CHAR_FORM_FEED_TAB,
+ };
+
+ /* Pre-built identifier entry for the preprocessor 'defined' operator.
+  * .size counts the terminating NUL (sizeof of a string literal).
+  * .last_defined_macro_with_this_id points at a static compound literal
+  * tagged PKW_DEFINE -- presumably so 'defined' always looks like a
+  * defined macro-ish token to the preprocessor; TODO confirm against the
+  * code that consumes this field. */
+ struct identifier defined_special_identifier
+ =
+ {
+ .size=sizeof("defined"),
+ .data="defined",
+ .last_defined_macro_with_this_id=(struct token*)&(struct token_defined_unary_operator){.type=PKW_DEFINE},
+ .last_use_as_a_macro_argument=NULL,
+ };
+
+ /* Map the first byte of str to its Source_Chars class via compress[].
+  * NOTE(review): the 'limit' parameter is ignored -- callers always pass 1;
+  * either use it or drop it from the interface.
+  * NOTE(review): *str is (possibly signed) char used as an array index; a
+  * byte >127 would index compress[] with a negative value -- confirm input
+  * is restricted to 7-bit ASCII. */
+ enum Source_Chars get_ch(const char *str,size_t limit)
+ {
+ return compress[*str];
+ }
+
+
+
+ #endif
F diff --git a/src/syntax/automatas/automata.h b/src/syntax/automatas/automata.h new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/automata.h
+ #ifndef WONKY_AUTOMATA_H
+ #define WONKY_AUTOMATA_H WONKY_AUTOMATA_H
+ #include <automata.hh>
+ #include <stddef.h>
+ #include <identifier.h>
+ #include <token.h>
+
+ /* Byte -> Source_Chars translation table (defined in automata.c). */
+ extern enum Source_Chars compress[256];
+ /* One state of a lexer DFA.  The automata are emitted as flat arrays by
+  * the generator, with delta entries pointing into the same array. */
+ struct Automata_Node
+ {
+ enum Automata_Action action; /* what the lexer does when a scan stops here */
+ enum LEXER_TYPE keyword; /* token kind dispensed at this state */
+ struct identifier *data; /* identifier payload, or NULL */
+
+ /* transition table indexed by the compressed character class */
+ struct Automata_Node *delta[CHAR_ENUM_END];
+
+ };
+
+ /* Returns the character class of *str; see automata.c for caveats. */
+ enum Source_Chars get_ch(const char *str,size_t limit);
+
+ /* Generated automata (chonky.c) and the 'defined' identifier sentinel. */
+ extern struct Automata_Node chonky[];
+ extern struct Automata_Node chonky_jr[];
+ extern struct identifier defined_special_identifier;
+
+ #endif
F diff --git a/src/syntax/automatas/automata.hh b/src/syntax/automatas/automata.hh new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/automata.hh
+ #ifndef WONKY_AUTOMATA_HH
+ #define WONKY_AUTOMATA_HH WONKY_AUTOMATA_HH
+
+ /* What the lexer should do when it stops on an automata node. */
+ enum Automata_Action
+ {
+ AUTOMATA_ACTION_DISPENSE_TOKEN,
+ AUTOMATA_ACTION_SWITCH_AUTOMATA,
+ AUTOMATA_ACTION_MACRO_EXPANSION,
+ AUTOMATA_ACTION_NO_ACTION,
+ };
+
+ /* Compressed input alphabet for the automata transition tables.
+  * Order matters: contiguous runs (CHAR_a..CHAR_z, CHAR_0..CHAR_9, ...)
+  * are iterated as ranges by the generator's connect_node(). */
+ enum Source_Chars
+ {
+ CHAR_NONE=0,/*0 is used in initialisors so don't change this*/
+ CHAR_A,
+ CHAR_B,
+ CHAR_C,
+ CHAR_D,
+ CHAR_E,
+ CHAR_F,
+ CHAR_G,
+ CHAR_H,
+ CHAR_I,
+ CHAR_J,
+ CHAR_K,
+ CHAR_L,
+ CHAR_M,
+ CHAR_N,
+ CHAR_O,
+ CHAR_P,
+ CHAR_Q,
+ CHAR_R,
+ CHAR_S,
+ CHAR_T,
+ CHAR_U,
+ CHAR_V,
+ CHAR_W,
+ CHAR_X,
+ CHAR_Y,
+ CHAR_Z,
+ CHAR_a,
+ CHAR_b,
+ CHAR_c,
+ CHAR_d,
+ CHAR_e,
+ CHAR_f,
+ CHAR_g,
+ CHAR_h,
+ CHAR_i,
+ CHAR_j,
+ CHAR_k,
+ CHAR_l,
+ CHAR_m,
+ CHAR_n,
+ CHAR_o,
+ CHAR_p,
+ CHAR_q,
+ CHAR_r,
+ CHAR_s,
+ CHAR_t,
+ CHAR_u,
+ CHAR_v,
+ CHAR_w,
+ CHAR_x,
+ CHAR_y,
+ CHAR_z,
+ CHAR_0,
+ CHAR_1,
+ CHAR_2,
+ CHAR_3,
+ CHAR_4,
+ CHAR_5,
+ CHAR_6,
+ CHAR_7,
+ CHAR_8,
+ CHAR_9,
+ CHAR_EXCLAMATION,
+ CHAR_DOUBLE_QUOTE,
+ CHAR_HASHTAG,
+ CHAR_PERCENT,
+ CHAR_AMPERSANT,
+ CHAR_SINGLE_QUOTE,
+ CHAR_OPEN_NORMAL,
+ CHAR_CLOSE_NORMAL,
+ CHAR_STAR,
+ CHAR_PLUS,
+ CHAR_COMMA,
+ CHAR_MINUS,
+ CHAR_DOT,
+ CHAR_FORWARD_SLASH,
+ CHAR_COLUMN,
+ CHAR_SEMI_COLUMN,
+ CHAR_LESS,
+ CHAR_EQUAL,
+ CHAR_GREATER,
+ CHAR_QUESTION,
+ CHAR_OPEN_SQUARE,
+ CHAR_BACKWARD_SLASH,
+ CHAR_CLOSE_SQUARE,
+ CHAR_HAT,
+ CHAR_UNDERSCORE,
+ CHAR_OPEN_CURLY,
+ CHAR_PIPE,
+ CHAR_CLOSE_CURLY,
+ CHAR_TILDE,
+ CHAR_SPACE,
+ CHAR_HORISONTAL_TAB,
+ CHAR_VERTICAL_TAB,
+ CHAR_FORM_FEED_TAB,
+
+ /* number of character classes; sizes Automata_Node.delta */
+ CHAR_ENUM_END
+ };
+
+ /* Token kinds: KW_* are C language tokens and constants, PKW_* are
+  * preprocessor directives, LT_EOF is end of input. */
+ enum LEXER_TYPE
+ {
+ KW_AUTO,
+ KW_DO,
+ KW_DOUBLE,
+ KW_INT,
+ KW_STRUCT,
+ KW_BREAK,
+ KW_ELSE,
+ KW_DEFINED,
+ KW_LONG,
+ KW_SWITCH,
+ KW_CASE,
+ KW_ENUM,
+ KW_REGISTER,
+ KW_TYPEDEF,
+ KW_CHAR,
+ KW_EXTERN,
+ KW_RETURN,
+ KW_UNION,
+ KW_CONST,
+ KW_FLOAT,
+ KW_SHORT,
+ KW_UNSIGNED,
+ KW_CONTINUE,
+ KW_FOR,
+ KW_SIGNED,
+ KW_VOID,
+ KW_DEFAULT,
+ KW_GOTO,
+ KW_SIZEOF,
+ KW_VOLATILE,
+ KW_IF,
+ KW_STATIC,
+ KW_WHILE,
+ KW_EXCLAMATION,
+ KW_PERCENT,
+ KW_AND,
+ KW_AND_AND,
+ KW_OPEN_NORMAL,
+ KW_CLOSE_NORMAL,
+ KW_STAR,
+ KW_PLUS,
+ KW_COMMA,
+ KW_MINUS,
+ KW_DOT,
+ KW_ARROW,
+ KW_COLUMN,
+ KW_SEMI_COLUMN,
+ KW_LESS,
+ KW_EQ,
+ KW_EQEQ,
+ KW_MORE,
+ KW_QUESTION,
+ KW_HAT,
+ KW_PIPE,
+ KW_PIPE_PIPE,
+ KW_TILDE,
+ KW_PLUSPLUS,
+ KW_MINUSMINUS,
+ KW_SHIFT_RIGHT,
+ KW_SHIFT_LEFT,
+ KW_LESS_EQ,
+ KW_MORE_EQ,
+ KW_NOT_EQ,
+ KW_PLUS_EQ,
+ KW_MINUS_EQ,
+ KW_STAR_EQ,
+ KW_PERCENT_EQ,
+ KW_SHIFT_LEFT_EQ,
+ KW_SHIFT_RIGHT_EQ,
+ KW_AND_EQ,
+ KW_HAT_EQ,
+ KW_PIPE_EQ,
+ KW_HASHTAG,
+ KW_HASHTAG_HASHTAG,
+ KW_ELIPSIS,
+ KW_DIV,
+ KW_INLINE,
+ KW_RESTRICT,
+ KW_BOOL,
+ KW_COMPLEX,
+ KW_IMAGINARY,
+ KW_OPEN_SQUARE,
+ KW_CLOSE_SQUARE,
+ KW_CLOSE_CURLY,
+ KW_OPEN_CURLY,
+ KW_DIV_EQ,
+ KW_FORWARD_SLASH,
+ KW_NOTYPE,
+ KW_HEXADECIMAL_CONSTANT,
+ KW_DECIMAL_CONSTANT,
+ KW_OCTAL_CONSTANT ,
+ KW_UNSIGNED_DECIMAL_CONSTANT,
+ KW_UNSIGNED_OCTAL_CONSTANT,
+ KW_UNSIGNED_HEXADECIMAL_CONSTANT,
+ KW_UNSIGNED_LONG_HEXADECIMAL_CONSTANT,
+ KW_UNSIGNED_LONG_OCTAL_CONSTANT,
+ KW_UNSIGNED_LONG_DECIMAL_CONSTANT,
+ KW_UNSIGNED_LONG_LONG_DECIMAL_CONSTANT,
+ KW_UNSIGNED_LONG_LONG_HEXADECIMAL_CONSTANT,
+ KW_UNSIGNED_LONG_LONG_OCTAL_CONSTANT,
+ KW_LONG_HEXADECIMAL_CONSTANT,
+ KW_LONG_OCTAL_CONSTANT,
+ KW_LONG_DECIMAL_CONSTANT,
+ KW_LONG_LONG_HEXADECIMAL_CONSTANT,
+ KW_LONG_LONG_OCTAL_CONSTANT,
+ KW_LONG_LONG_DECIMAL_CONSTANT,
+ KW_DOUBLE_DECIMAL_CONSTANT,
+ KW_LONG_DOUBLE_DECIMAL_CONSTANT,
+ KW_FLOAT_DECIMAL_CONSTANT,
+ KW_DOUBLE_HEXADECIMAL_CONSTANT,
+ KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT,
+ KW_FLOAT_HEXADECIMAL_CONSTANT,
+ KW_COMMENT,
+ KW_ID,
+ KW_CHAR_CONSTANT,
+ KW_WIDE_CHAR_CONSTANT,
+ KW_STRING,
+ KW_WIDE_STRING,
+ PKW_IF,
+ PKW_IFDEF,
+ PKW_IFNDEF,
+ PKW_ELIF,
+ PKW_ELSE,
+ PKW_ENDIF,
+ PKW_INCLUDE,
+ PKW_FUNCTIONLIKE_DEFINE,
+ PKW_DEFINE,
+ PKW_UNDEF,
+ PKW_LINE,
+ PKW_ERROR,
+ PKW_PRAGMA,
+ PKW_COMMENT,
+ PKW_NOTYPE,
+ LT_EOF,
+
+ LEXER_TYPE_END
+ };
+
+ struct Automata_Node;
+
+ #endif
F diff --git a/src/syntax/automatas/generator/generator.c b/src/syntax/automatas/generator/generator.c new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/generator/generator.c
+ #ifndef WONKY_AUTOMATA_GENERATOR_C
+ #define WONKY_AUTOMATA_GENERATOR_C WONKY_AUTOMATA_GENERATOR_C
+ #include <generator.h>
+
+
+ /* Build-time tool: constructs the lexer automata in memory and writes
+  * them out as C source (chonky.c) to be compiled into wonky itself. */
+ int main()
+ {
+ wonky_memory_init();
+
+ print_automatas();
+
+ wonky_memory_delete();
+
+ return 0;
+ }
+
+ /* Open chonky.c in the current directory and emit the 'chonky' automata
+  * as a C array; exits with status 1 if the file cannot be created. */
+ void print_automatas()
+ {
+ FILE *chonky_c;
+
+ chonky_c=fopen("chonky.c","w");
+ if(chonky_c==NULL)
+ {
+ fprintf(stderr,"Could not write to chonky.c\nAborting!\n");
+ exit(1);
+ }
+
+ fprintf(chonky_c,"#ifndef WONKY_CHONKY_C\n#define WONKY_CHONKY_C WONKY_CHONKY_C\n#include <stddef.h>\n#include <automata.h>\n\n");
+ print_automata(make_chonky(),"chonky",chonky_c);
+ fprintf(chonky_c,"\n#endif");
+ }
+ /* Emit one automata as 'struct Automata_Node name[]={...};'.
+  * NOTE(review): the closing fprintf passes 'name' but the format "\n};"
+  * has no conversion -- the extra argument is dead; drop it. */
+ void print_automata(struct Generator_Node *root,const char *name,FILE *out)
+ {
+ fprintf(out,"struct Automata_Node %s[]=\n{\n",name);
+ print_automata_nodes(root,name,out);
+ fprintf(out,"\n};",name);
+ }
+ /* BFS over the generator graph, numbering nodes as they are first seen
+  * and printing each as an array element; delta entries are emitted as
+  * 'name+index' pointer arithmetic into the flat array. */
+ void print_automata_nodes(struct Generator_Node *node,const char *name,FILE *out)
+ {
+ int i;
+ ssize_t node_number=0;
+ struct Queue *node_queue;
+ struct Generator_Node *current_node;
+ struct Generator_Node *adjacent_node;
+
+ node_queue=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(node_queue);
+
+ Queue_Push(node_queue,node);
+ node->node_number=0;
+
+ while(node_queue->size>0)
+ {
+ current_node=Queue_Pop(node_queue);
+ fprintf(out,"{ \n.action=%s,\n.keyword=%s,\n.data=%s,\n.delta={",current_node->action_string,current_node->kw_string,current_node->data_string);
+ for(i=0;i<CHAR_ENUM_END;++i)
+ if(current_node->output.delta[i]!=NULL)
+ {
+ adjacent_node=(struct Generator_Node*)current_node->output.delta[i];
+ /* first visit: assign the next array slot and queue for printing */
+ if(adjacent_node->node_number==NODE_NOT_NUMBERED)
+ {
+ adjacent_node->node_number=++node_number;
+ Queue_Push(node_queue,adjacent_node);
+ }
+ fprintf(out,"%s+%zd ,",name,adjacent_node->node_number);
+ }else
+ {
+ fprintf(out,"NULL, ");
+ }
+
+ fprintf(out,"}\n},\n");
+ }
+
+
+ wonky_assert(node_queue->size==0);
+ wonky_free(node_queue);
+ }
+ /* Build the main lexer automata: fixed keywords/punctuators first, then
+  * the open-ended token families (floats, integers, strings, ids). */
+ struct Generator_Node* make_chonky()
+ {
+ struct Generator_Node *ret;
+ ret=make_generator(chonky_keywords,number_of_chonky_keywords);
+
+ add_finishing_float_nodes(ret,0);
+ add_number_nodes(ret);
+ add_string_char_nodes(ret);
+ add_id_nodes(ret);
+
+ return ret;
+ }
+ /* Create a fresh root node and insert every keyword entry under it. */
+ struct Generator_Node* make_generator(const struct Keyword_Entry keywords[],size_t number_of_keywords)
+ {
+ size_t i;
+ struct Generator_Node *ret;
+
+ ret=get_generator_node(null_str,no_type_str,automata_no_action_str);
+
+ for(i=0;i<number_of_keywords;++i)
+ insert_keyword(ret,keywords+i);
+
+ return ret;
+
+ }
+ /* Walk/extend the trie one character at a time up to the keyword's last
+  * character (the loop stops one char early -- the final transition is
+  * handled after the loop so the terminal node gets the entry's token
+  * kind/action, overwriting placeholders created by longer keywords. */
+ struct Generator_Node* insert_keyword(struct Generator_Node *node,const struct Keyword_Entry *entry)
+ {
+ size_t where_in_keyword;
+ struct Generator_Node *current;
+
+ for(where_in_keyword=0,current=node;
+ entry->keyword[where_in_keyword]!='\0' && entry->keyword[where_in_keyword+1]!='\0';
+ current=(struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)],++where_in_keyword)
+ {
+ if(current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]==NULL)
+ current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]=(struct Automata_Node*)get_generator_node(null_str,no_type_str,automata_no_action_str);
+ }
+
+ if(current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]==NULL)
+ current->output.delta[get_ch(entry->keyword+where_in_keyword,1)]=(struct Automata_Node*)get_generator_node(entry->data_string,entry->kw_string,entry->action_string);
+ else
+ {
+ ((struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)])->kw_string=entry->kw_string;
+ ((struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)])->data_string=entry->data_string;
+ ((struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)])->action_string=entry->action_string;
+ }
+
+ return (struct Generator_Node*)current->output.delta[get_ch(entry->keyword+where_in_keyword,1)];
+ }
+ /* Allocate a zeroed node carrying the strings to be printed for its
+  * .data/.keyword/.action fields; node_number marks it as unvisited. */
+ struct Generator_Node* get_generator_node(const char *data_string,const char *kw_string,const char *action_string)
+ {
+ struct Generator_Node *ret;
+
+ ret=wonky_calloc(1,sizeof(struct Generator_Node));
+ ret->node_number=NODE_NOT_NUMBERED;
+ ret->data_string=data_string;
+ ret->kw_string=kw_string;
+ ret->action_string=action_string;
+
+ return ret;
+ }
+ /*requires that there are no loops in the automata*/
+ /* Attach identifier behaviour: any id-start char ([A-Za-z_]) not already
+  * claimed by a keyword goes to a generic id node, and every keyword
+  * prefix node that is still KW_NOTYPE is converted into an id node.
+  * NOTE(review): kw_string is compared with '==' against no_type_str --
+  * pointer identity only holds because the generator is built as a single
+  * translation unit sharing these static header constants; confirm. */
+ void add_id_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *id_node;
+ struct Queue *node_queue;
+ struct Generator_Node *current_node;
+ _Bool push_nodes;
+
+ id_node=get_generator_node(null_str,id_type_str,automata_dispense_token_str);
+
+ node_queue=wonky_malloc(sizeof(struct Queue));
+ Queue_Init(node_queue);
+
+ Queue_Push(node_queue,id_node);
+
+ connect_node(node,id_node,node_queue,CHAR_a,CHAR_z,1);
+ connect_node(node,id_node,node_queue,CHAR_A,CHAR_Z,1);
+ connect_node(node,id_node,node_queue,CHAR_UNDERSCORE,CHAR_UNDERSCORE,1);
+
+ while(node_queue->size>0)
+ {
+ current_node=Queue_Pop(node_queue);
+
+ /* untouched placeholder node: make it dispense an id token and keep
+  * walking its successors; already-typed nodes are left alone */
+ if(current_node->kw_string==no_type_str)
+ {
+ current_node->kw_string=id_type_str;
+ current_node->action_string=automata_dispense_token_str;
+ push_nodes=1;
+
+ }else
+ {
+ push_nodes=0;
+ }
+ connect_node(current_node,id_node,node_queue,CHAR_a,CHAR_z,push_nodes);
+ connect_node(current_node,id_node,node_queue,CHAR_A,CHAR_Z,push_nodes);
+ connect_node(current_node,id_node,node_queue,CHAR_0,CHAR_9,push_nodes);
+ connect_node(current_node,id_node,node_queue,CHAR_UNDERSCORE,CHAR_UNDERSCORE,push_nodes);
+
+ }
+
+ wonky_assert(node_queue->size==0);
+ wonky_free(node_queue);
+
+ }
+ /* Wire up integer constants: 1-9 starts a decimal constant, a leading 0
+  * splits into octal (more octal digits) or hexadecimal (0x/0X), and each
+  * family gets L/LL suffix states. */
+ void add_number_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *oct_hex_split;
+
+ add_integer_suffix(add_decimal_number_nodes(node),"KW_LONG_DECIMAL_CONSTANT","KW_LONG_LONG_DECIMAL_CONSTANT");
+
+ /* a lone '0' is itself a valid octal constant */
+ oct_hex_split=get_generator_node(null_str,"KW_OCTAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,oct_hex_split,NULL,CHAR_0,CHAR_0,0);
+ add_integer_suffix(oct_hex_split,"KW_LONG_OCTAL_CONSTANT","KW_LONG_LONG_OCTAL_CONSTANT");
+
+ add_integer_suffix(add_hexadecimal_number_nodes(oct_hex_split),"KW_LONG_HEXADECIMAL_CONSTANT","KW_LONG_LONG_HEXADECIMAL_CONSTANT");
+ add_integer_suffix(add_octal_number_nodes(oct_hex_split),"KW_LONG_OCTAL_CONSTANT","KW_LONG_LONG_OCTAL_CONSTANT");
+ }
+ /* Decimal constant: [1-9][0-9]*, with float continuations hung off it. */
+ struct Generator_Node* add_decimal_number_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *decimal_node;
+ decimal_node=get_generator_node(null_str,"KW_DECIMAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,decimal_node,NULL,CHAR_1,CHAR_9,0);
+ connect_node(decimal_node,decimal_node,NULL,CHAR_0,CHAR_9,0);
+ add_finishing_float_nodes(decimal_node,1);
+ return decimal_node;
+ }
+ /*the leading 0 has been taken from add_number_nodes*/
+ /* Hexadecimal constant: (x|X)[0-9a-fA-F]+ after the leading 0; the
+  * x/X state itself dispenses nothing until a digit arrives. */
+ struct Generator_Node* add_hexadecimal_number_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *hexadecimal_node_start;
+ struct Generator_Node *hexadecimal_node;
+
+ hexadecimal_node_start=get_generator_node(null_str,no_type_str,automata_no_action_str);
+
+ connect_node(node,hexadecimal_node_start,NULL,CHAR_x,CHAR_x,0);
+ connect_node(node,hexadecimal_node_start,NULL,CHAR_X,CHAR_X,0);
+
+ hexadecimal_node=get_generator_node(null_str,"KW_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
+
+ connect_node(hexadecimal_node_start,hexadecimal_node,NULL,CHAR_0,CHAR_9,0);
+ connect_node(hexadecimal_node_start,hexadecimal_node,NULL,CHAR_a,CHAR_f,0);
+ connect_node(hexadecimal_node_start,hexadecimal_node,NULL,CHAR_A,CHAR_F,0);
+
+ connect_node(hexadecimal_node,hexadecimal_node,NULL,CHAR_0,CHAR_9,0);
+ connect_node(hexadecimal_node,hexadecimal_node,NULL,CHAR_a,CHAR_f,0);
+ connect_node(hexadecimal_node,hexadecimal_node,NULL,CHAR_A,CHAR_F,0);
+
+
+ add_finishing_hexadecimal_float_nodes(hexadecimal_node,1);
+ return hexadecimal_node;
+ }
+ /* Octal constant: 0[0-7]+; float continuations cover cases like 08.5. */
+ struct Generator_Node* add_octal_number_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *octal_node;
+
+ add_finishing_float_nodes(node,1);
+
+ octal_node=get_generator_node(null_str,"KW_OCTAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,octal_node,NULL,CHAR_0,CHAR_7,0);
+ connect_node(octal_node,octal_node,NULL,CHAR_0,CHAR_7,0);
+
+ add_finishing_float_nodes(octal_node,1);
+ return octal_node;
+ }
+ /* Append l/L and ll/LL states dispensing the given long / long long
+  * token kinds.  NOTE(review): mixed-case 'lL'/'Ll' also reaches the
+  * long-long state -- standard C rejects those spellings; confirm. */
+ void add_integer_suffix(struct Generator_Node *tail,const char *l,const char *ll)
+ {
+ struct Generator_Node *long_node;
+ struct Generator_Node *long_long_node;
+
+ long_node=get_generator_node(null_str,l,automata_dispense_token_str);
+ long_long_node=get_generator_node(null_str,ll,automata_dispense_token_str);
+
+ connect_node(tail,long_node,NULL,CHAR_l,CHAR_l,0);
+ connect_node(tail,long_node,NULL,CHAR_L,CHAR_L,0);
+
+ connect_node(long_node,long_long_node,NULL,CHAR_l,CHAR_l,0);
+ connect_node(long_node,long_long_node,NULL,CHAR_L,CHAR_L,0);
+ }
+ /* String/char literals: an 'L' prefix leads to the wide variants, and
+  * plain "..." / '...' hang directly off the root. */
+ void add_string_char_nodes(struct Generator_Node *node)
+ {
+ struct Generator_Node *prefixed_string_node;
+
+ prefixed_string_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ connect_node(node,prefixed_string_node,NULL,CHAR_L,CHAR_L,0);
+
+ add_string_char_nodes_inner(prefixed_string_node,"KW_WIDE_STRING","KW_WIDE_CHAR_CONSTANT");
+ add_string_char_nodes_inner(node,"KW_STRING","KW_CHAR_CONSTANT");
+
+ }
+ /* Quote -> inner state looping on every character class -> closing quote.
+  * The inner self-loop is added last so quote transitions win.
+  * NOTE(review): the self-loops pass (0, CHAR_ENUM_END) to connect_node,
+  * whose range is inclusive -- that indexes delta[CHAR_ENUM_END], one past
+  * the array; upper bound should be CHAR_ENUM_END-1.
+  * NOTE(review): no handling of backslash escapes, so '\'' or "\"" would
+  * terminate early -- confirm escapes are handled elsewhere. */
+ void add_string_char_nodes_inner(struct Generator_Node *node,const char *str_kw,const char *char_kw)
+ {
+ struct Generator_Node *inner_string_node;
+ struct Generator_Node *ending_string_node;
+
+ struct Generator_Node *inner_char_node;
+ struct Generator_Node *ending_char_node;
+
+ inner_string_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ ending_string_node=get_generator_node(null_str,str_kw,automata_dispense_token_str);
+
+ inner_char_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ ending_char_node=get_generator_node(null_str,char_kw,automata_dispense_token_str);
+
+
+ connect_node(node,inner_char_node,NULL,CHAR_SINGLE_QUOTE,CHAR_SINGLE_QUOTE,0);
+ connect_node(inner_char_node,ending_char_node,NULL,CHAR_SINGLE_QUOTE,CHAR_SINGLE_QUOTE,0);
+ connect_node(inner_char_node,inner_char_node,NULL,0,CHAR_ENUM_END,0);
+
+ connect_node(node,inner_string_node,NULL,CHAR_DOUBLE_QUOTE,CHAR_DOUBLE_QUOTE,0);
+ connect_node(inner_string_node,ending_string_node,NULL,CHAR_DOUBLE_QUOTE,CHAR_DOUBLE_QUOTE,0);
+ connect_node(inner_string_node,inner_string_node,NULL,0,CHAR_ENUM_END,0);
+ }
+ /* Hang decimal-float continuations (fraction, exponent, f/F/l/L suffix)
+  * off 'node'; has_read_digits tells whether '.' alone may already end a
+  * valid constant (e.g. "1." vs. ".5"). */
+ void add_finishing_float_nodes(struct Generator_Node *node,_Bool has_read_digits)
+ {
+ struct Generator_Node *hold;
+ struct Generator_Node *hold2;
+ hold=add_fractional_constant(node,has_read_digits);
+ hold2=add_exponent_part(hold);
+ add_float_suffix(hold,"KW_FLOAT_DECIMAL_CONSTANT","KW_LONG_DOUBLE_DECIMAL_CONSTANT");
+ add_float_suffix(hold2,"KW_FLOAT_DECIMAL_CONSTANT","KW_LONG_DOUBLE_DECIMAL_CONSTANT");
+
+ }
+ /* Same as add_finishing_float_nodes but for hex floats (p/P exponent). */
+ void add_finishing_hexadecimal_float_nodes(struct Generator_Node *node,_Bool has_read_digits)
+ {
+ struct Generator_Node *hold;
+ struct Generator_Node *hold2;
+
+ hold=add_hexadecimal_fractional_constant(node,has_read_digits);
+ hold2=add_hexadecimal_exponent_part(hold);
+ add_float_suffix(hold,"KW_FLOAT_HEXADECIMAL_CONSTANT","KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT");
+ add_float_suffix(hold2,"KW_FLOAT_HEXADECIMAL_CONSTANT","KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT");
+ }
+ /* Terminal f/F (float) and l/L (long double) suffix states. */
+ void add_float_suffix(struct Generator_Node *node,const char *f,const char *l)
+ {
+ struct Generator_Node *float_node;
+ struct Generator_Node *long_double_node;
+
+ float_node=get_generator_node(null_str,f,automata_dispense_token_str);
+ long_double_node=get_generator_node(null_str,l,automata_dispense_token_str);
+
+ connect_node(node,float_node,NULL,CHAR_f,CHAR_f,0);
+ connect_node(node,float_node,NULL,CHAR_F,CHAR_F,0);
+
+ connect_node(node,long_double_node,NULL,CHAR_l,CHAR_l,0);
+ connect_node(node,long_double_node,NULL,CHAR_L,CHAR_L,0);
+ }
+ /* Fractional part '.'[0-9]*.  With digits already read the dot state is
+  * itself accepting ("1."); otherwise at least one digit must follow the
+  * dot before a constant is dispensed (".5"). Returns the accepting node. */
+ struct Generator_Node* add_fractional_constant(struct Generator_Node *node,_Bool has_read_digits)
+ {
+ struct Generator_Node *dot_node;
+ struct Generator_Node *digit_node;
+
+ if(has_read_digits)
+ {
+ dot_node=get_generator_node(null_str,"KW_DOUBLE_DECIMAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
+ connect_node(dot_node,dot_node,NULL,CHAR_0,CHAR_9,0);
+
+ return dot_node;
+ }else
+ {
+ dot_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ digit_node=get_generator_node(null_str,"KW_DOUBLE_DECIMAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
+ connect_node(dot_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+ connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+ return digit_node;
+ }
+
+
+ }
+ /* Hex-float fractional part: same shape as the decimal version but the
+  * digit alphabet is [0-9a-fA-F]. */
+ struct Generator_Node* add_hexadecimal_fractional_constant(struct Generator_Node *node,_Bool has_read_digits)
+ {
+ struct Generator_Node *dot_node;
+ struct Generator_Node *digit_node;
+
+ if(has_read_digits)
+ {
+ dot_node=get_generator_node(null_str,"KW_DOUBLE_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
+ connect_node(dot_node,dot_node,NULL,CHAR_0,CHAR_9,0);
+ connect_node(dot_node,dot_node,NULL,CHAR_a,CHAR_f,0);
+ connect_node(dot_node,dot_node,NULL,CHAR_A,CHAR_F,0);
+
+ return dot_node;
+ }else
+ {
+ dot_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ digit_node=get_generator_node(null_str,"KW_DOUBLE_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
+ connect_node(node,dot_node,NULL,CHAR_DOT,CHAR_DOT,0);
+ connect_node(dot_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+ connect_node(dot_node,digit_node,NULL,CHAR_a,CHAR_f,0);
+ connect_node(dot_node,digit_node,NULL,CHAR_A,CHAR_F,0);
+
+ connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+ connect_node(digit_node,digit_node,NULL,CHAR_a,CHAR_f,0);
+ connect_node(digit_node,digit_node,NULL,CHAR_A,CHAR_F,0);
+ return digit_node;
+ }
+
+
+ }
+ /* Exponent (e|E)(+|-)?[0-9]+; returns the accepting digit node.
+  * NOTE(review): '+'/'-' lead straight to the accepting digit node, so
+  * "1e+" would be accepted as a constant -- confirm intended. */
+ struct Generator_Node* add_exponent_part(struct Generator_Node *node)
+ {
+ struct Generator_Node *digit_node;
+ struct Generator_Node *e_node;
+
+ e_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ digit_node=get_generator_node(null_str,"KW_DOUBLE_DECIMAL_CONSTANT",automata_dispense_token_str);
+
+ connect_node(node,e_node,NULL,CHAR_e,CHAR_e,0);
+ connect_node(node,e_node,NULL,CHAR_E,CHAR_E,0);
+
+ connect_node(e_node,digit_node,NULL,CHAR_PLUS,CHAR_PLUS,0);
+ connect_node(e_node,digit_node,NULL,CHAR_MINUS,CHAR_MINUS,0);
+ connect_node(e_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+
+ connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+
+ return digit_node;
+ }
+ /* Binary exponent (p|P)(+|-)?[0-9]+ for hex floats; same sign caveat as
+  * add_exponent_part. */
+ struct Generator_Node* add_hexadecimal_exponent_part(struct Generator_Node *node)
+ {
+ struct Generator_Node *digit_node;
+ struct Generator_Node *p_node;
+
+ p_node=get_generator_node(null_str,no_type_str,automata_no_action_str);
+ digit_node=get_generator_node(null_str,"KW_DOUBLE_HEXADECIMAL_CONSTANT",automata_dispense_token_str);
+
+ connect_node(node,p_node,NULL,CHAR_p,CHAR_p,0);
+ connect_node(node,p_node,NULL,CHAR_P,CHAR_P,0);
+
+ connect_node(p_node,digit_node,NULL,CHAR_PLUS,CHAR_PLUS,0);
+ connect_node(p_node,digit_node,NULL,CHAR_MINUS,CHAR_MINUS,0);
+ connect_node(p_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+
+ connect_node(digit_node,digit_node,NULL,CHAR_0,CHAR_9,0);
+
+ return digit_node;
+ }
+ /* For every character class in [begin,end] (INCLUSIVE): if node has no
+  * transition yet, point it at target_node; otherwise, when push_nodes is
+  * set, queue the already-present successor so the caller can revisit it
+  * (used by add_id_nodes to retrofit keyword-prefix nodes).
+  * NOTE(review): the inclusive bound means callers passing
+  * end=CHAR_ENUM_END (the string/char self-loops) write delta[CHAR_ENUM_END],
+  * one element past the array -- out-of-bounds; fix bound or callers.
+  * NOTE(review): node_queue may be NULL; safe only while push_nodes is 0. */
+ void connect_node(struct Generator_Node *node,struct Generator_Node *target_node,struct Queue *node_queue,enum Source_Chars begin,enum Source_Chars end,_Bool push_nodes)
+ {
+ int i;
+ for(i=begin;i<=end;++i)
+ if(node->output.delta[i]==NULL)
+ node->output.delta[i]=(struct Automata_Node*)target_node;
+ else if(push_nodes)
+ Queue_Push(node_queue,node->output.delta[i]);
+
+ }
+ #endif
F diff --git a/src/syntax/automatas/generator/generator.h b/src/syntax/automatas/generator/generator.h new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/generator/generator.h
+ #ifndef WONKY_AUTOMATA_GENERATOR_H
+ #define WONKY_AUTOMATA_GENERATOR_H WONKY_AUTOMATA_GENERATOR_H
+ #include <generator.hh>
+
+ #include <stdio.h>
+ #include <automata.h>
+ #include <queue.h>
+ #include <wonky_malloc.h>
+ #include <wonky_assert.h>
+ #include <keyword_list.h>
+
+
+ /* Build-time wrapper around Automata_Node: the strings are what gets
+  * printed into the generated C initialisers, and node_number is the
+  * node's slot in the emitted flat array (assigned during printing). */
+ struct Generator_Node
+ {
+ ssize_t node_number;
+ const char *data_string;
+ const char *kw_string;
+ const char *action_string;
+ struct Automata_Node output;
+ };
+
+
+ void print_automatas();
+ void print_automata(struct Generator_Node *root,const char *name,FILE *out);
+ void print_automata_nodes(struct Generator_Node *node,const char *name,FILE *out);
+
+ struct Generator_Node* make_chonky();
+ struct Generator_Node* make_generator(const struct Keyword_Entry *keywords,size_t number_of_keywords);
+ struct Generator_Node* insert_keyword(struct Generator_Node *node,const struct Keyword_Entry *entry);
+ struct Generator_Node* get_generator_node(const char *data_string,const char *kw_string,const char *action_string);
+ void add_id_nodes(struct Generator_Node *node);
+
+ void add_number_nodes(struct Generator_Node *node);
+
+ /*these return the last generated node so we can add L and LL to them*/
+ struct Generator_Node* add_decimal_number_nodes(struct Generator_Node *node);
+ struct Generator_Node* add_hexadecimal_number_nodes(struct Generator_Node *node);
+ struct Generator_Node* add_octal_number_nodes(struct Generator_Node *node);
+
+ void add_integer_suffix(struct Generator_Node *tail,const char *l,const char *ll);
+
+ void add_string_char_nodes(struct Generator_Node *node);
+ void add_string_char_nodes_inner(struct Generator_Node *node,const char *str_kw,const char *char_kw);
+
+ void add_finishing_float_nodes(struct Generator_Node *node,_Bool has_read_digits);
+ void add_finishing_hexadecimal_float_nodes(struct Generator_Node *node,_Bool has_read_digits);
+ void add_float_suffix(struct Generator_Node *node,const char *f,const char *l);
+
+ struct Generator_Node* add_fractional_constant(struct Generator_Node *node,_Bool has_read_digits);
+ struct Generator_Node* add_hexadecimal_fractional_constant(struct Generator_Node *node,_Bool has_read_digits);
+
+ struct Generator_Node* add_exponent_part(struct Generator_Node *node);
+ struct Generator_Node* add_hexadecimal_exponent_part(struct Generator_Node *node);
+
+
+ void connect_node(struct Generator_Node *node,struct Generator_Node *target_node,struct Queue *node_queue,enum Source_Chars begin,enum Source_Chars end,_Bool push_nodes);
+
+ int main();
+ /* Sentinel node_number for nodes not yet placed in the output array. */
+ static const ssize_t NODE_NOT_NUMBERED=-1;
+
+ /* Shared literal strings printed into the generated source.
+  * NOTE(review): being static const in a header, every including TU gets
+  * its own copies; generator.c compares kw_string with '==' against these,
+  * which only works while the generator is a single translation unit. */
+ static const char *null_str="NULL";
+ static const char *no_type_str="KW_NOTYPE";
+ static const char *id_type_str="KW_ID";
+
+ static const char *automata_no_action_str="AUTOMATA_ACTION_NO_ACTION";
+ static const char *automata_dispense_token_str="AUTOMATA_ACTION_DISPENSE_TOKEN";
+
+ #endif
F diff --git a/src/syntax/automatas/generator/generator.hh b/src/syntax/automatas/generator/generator.hh new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/generator/generator.hh
+ #ifndef WONKY_AUTOMATA_GENERATOR_HH
+ #define WONKY_AUTOMATA_GENERATOR_HH WONKY_AUTOMATA_GENERATOR_HH
+
+
+ /* Forward declaration only; full definition lives in generator.h. */
+ struct Generator_Node;
+
+ #endif
F diff --git a/src/syntax/automatas/generator/keyword_list.c b/src/syntax/automatas/generator/keyword_list.c new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/generator/keyword_list.c
+ #ifndef WONKY_KEYWORD_LIST_C
+ #define WONKY_KEYWORD_LIST_C WONKY_KEYWORD_LIST_C
+ #include <keyword_list.h>
+
+
+ struct Keyword_Entry chonky_keywords[]
+ =
+ {
+ {
+ .keyword="auto",
+ .kw_string="KW_AUTO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="do",
+ .kw_string="KW_DO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="double",
+ .kw_string="KW_DOUBLE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="int",
+ .kw_string="KW_INT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="struct",
+ .kw_string="KW_STRUCT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="break",
+ .kw_string="KW_BREAK",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="else",
+ .kw_string="KW_ELSE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="long",
+ .kw_string="KW_LONG",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="switch",
+ .kw_string="KW_SWITCH",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="case",
+ .kw_string="KW_CASE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="enum",
+ .kw_string="KW_ENUM",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="register",
+ .kw_string="KW_REGISTER",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="typedef",
+ .kw_string="KW_TYPEDEF",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="char",
+ .kw_string="KW_CHAR",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="extern",
+ .kw_string="KW_EXTERN",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="return",
+ .kw_string="KW_RETURN",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="union",
+ .kw_string="KW_UNION",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="const",
+ .kw_string="KW_CONST",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="float",
+ .kw_string="KW_FLOAT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="short",
+ .kw_string="KW_SHORT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="unsigned",
+ .kw_string="KW_UNSIGNED",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="continue",
+ .kw_string="KW_CONTINUE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="for",
+ .kw_string="KW_FOR",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="signed",
+ .kw_string="KW_SIGNED",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="void",
+ .kw_string="KW_VOID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="default",
+ .kw_string="KW_DEFAULT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="goto",
+ .kw_string="KW_GOTO",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="sizeof",
+ .kw_string="KW_SIZEOF",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="volatile",
+ .kw_string="KW_VOLATILE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="if",
+ .kw_string="KW_IF",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="static",
+ .kw_string="KW_STATIC",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="while",
+ .kw_string="KW_WHILE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="!",
+ .kw_string="KW_EXCLAMATION",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="%",
+ .kw_string="KW_PERCENT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="&",
+ .kw_string="KW_AND",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="&&",
+ .kw_string="KW_AND_AND",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="(",
+ .kw_string="KW_OPEN_NORMAL",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=")",
+ .kw_string="KW_CLOSE_NORMAL",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="*",
+ .kw_string="KW_STAR",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="+",
+ .kw_string="KW_PLUS",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=",",
+ .kw_string="KW_COMMA",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="-",
+ .kw_string="KW_MINUS",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=".",
+ .kw_string="KW_DOT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="->",
+ .kw_string="KW_ARROW",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=":",
+ .kw_string="KW_COLUMN",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=";",
+ .kw_string="KW_SEMI_COLUMN",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="<",
+ .kw_string="KW_LESS",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="=",
+ .kw_string="KW_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="==",
+ .kw_string="KW_EQEQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=">",
+ .kw_string="KW_MORE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="?",
+ .kw_string="KW_QUESTION",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="[",
+ .kw_string="KW_OPEN_SQUARE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="]",
+ .kw_string="KW_CLOSE_SQUARE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="^",
+ .kw_string="KW_HAT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="{",
+ .kw_string="KW_OPEN_CURLY",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="}",
+ .kw_string="KW_CLOSE_CURLY",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="|",
+ .kw_string="KW_PIPE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="||",
+ .kw_string="KW_PIPE_PIPE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="~",
+ .kw_string="KW_TILDE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="++",
+ .kw_string="KW_PLUSPLUS",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="--",
+ .kw_string="KW_MINUSMINUS",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=">>",
+ .kw_string="KW_SHIFT_RIGHT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="<<",
+ .kw_string="KW_SHIFT_LEFT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="<=",
+ .kw_string="KW_LESS_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=">=",
+ .kw_string="KW_MORE_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="!=",
+ .kw_string="KW_NOT_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="+=",
+ .kw_string="KW_PLUS_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="-=",
+ .kw_string="KW_MINUS_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="*=",
+ .kw_string="KW_STAR_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="%=",
+ .kw_string="KW_PERCENT_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="/=",
+ .kw_string="KW_DIV_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="<<=",
+ .kw_string="KW_SHIFT_LEFT_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=">>=",
+ .kw_string="KW_SHIFT_RIGHT_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="&=",
+ .kw_string="KW_AND_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="^=",
+ .kw_string="KW_HAT_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="|=",
+ .kw_string="KW_PIPE_EQ",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="#",
+ .kw_string="KW_HASHTAG",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="##",
+ .kw_string="KW_HASHTAG_HASHTAG",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="...",
+ .kw_string="KW_ELIPSIS",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="/",
+ .kw_string="KW_DIV",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="inline",
+ .kw_string="KW_INLINE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="restrict",
+ .kw_string="KW_RESTRICT",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="_Bool",
+ .kw_string="KW_BOOL",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="_Complex",
+ .kw_string="KW_COMPLEX",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="_Imaginary",
+ .kw_string="KW_IMAGINARY",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="<:",
+ .kw_string="KW_OPEN_SQUARE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword=":>",
+ .kw_string="KW_CLOSE_SQUARE",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="<%",
+ .kw_string="KW_OPEN_CURLY",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="%>",
+ .kw_string="KW_CLOSE_CURLY",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="%:",
+ .kw_string="KW_HASHTAG",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="%:%:",
+ .kw_string="KW_HASHTAG_HASHTAG",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="NULL"
+ },
+ {
+ .keyword="defined",
+ .kw_string="KW_ID",
+ .action_string="AUTOMATA_ACTION_DISPENSE_TOKEN",
+ .data_string="&defined_special_identifier"
+ },
+ };
+
+ /* element count of chonky_keywords; derived from the array itself so the two cannot drift apart */
+ size_t number_of_chonky_keywords=sizeof(chonky_keywords)/sizeof(chonky_keywords[0]);
+ #endif
F diff --git a/src/syntax/automatas/generator/keyword_list.h b/src/syntax/automatas/generator/keyword_list.h new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/generator/keyword_list.h
+ #ifndef WONKY_KEYWORD_LIST_H
+ #define WONKY_KEYWORD_LIST_H WONKY_KEYWORD_LIST_H
+ #include <keyword_list.hh>
+
+ #include <automata.h>
+ #include <wonky_malloc.h>
+ #include <generator.h>
+
+ /*
+  * One row of the keyword table consumed by the automata generator.
+  * Every member is a C source fragment that the generator pastes verbatim
+  * into the emitted automata tables.
+  */
+ struct Keyword_Entry
+ {
+ 	const char *keyword;       /* literal spelling to match, e.g. "<<=" */
+ 	const char *kw_string;     /* LEXER_TYPE enumerator spelling, e.g. "KW_SHIFT_LEFT_EQ" */
+ 	const char *action_string; /* automata action enumerator spelling */
+ 	const char *data_string;   /* extra-data expression, usually "NULL" */
+ };
+
+ /* table defined in keyword_list.c; its length is number_of_chonky_keywords */
+ extern struct Keyword_Entry chonky_keywords[];
+ extern size_t number_of_chonky_keywords;
+
+ #endif
F diff --git a/src/syntax/automatas/generator/keyword_list.hh b/src/syntax/automatas/generator/keyword_list.hh new file mode 100644 --- /dev/null +++ b/src/syntax/automatas/generator/keyword_list.hh
+ #ifndef WONKY_KEYWORD_LIST_HH
+ #define WONKY_KEYWORD_LIST_HH WONKY_KEYWORD_LIST_HH
+
+ /* forward declaration so headers can refer to the table row type without including keyword_list.h */
+ struct Keyword_Entry;
+
+ #endif
F diff --git a/src/syntax/identifier/identifier.c b/src/syntax/identifier/identifier.c new file mode 100644 --- /dev/null +++ b/src/syntax/identifier/identifier.c
+ #ifndef WONKY_IDENTIFIER_C
+ #define WONKY_IDENTIFIER_C WONKY_IDENTIFIER_C
+ #include <identifier.h>
+
+ /*
+  * Allocate the single shared identifier node for the given character data.
+  * The data buffer is adopted, not copied; macro bookkeeping starts out empty.
+  */
+ struct identifier* get_identifier(char *data,size_t size)
+ {
+ 	struct identifier *id_node=wonky_malloc(sizeof(struct identifier));
+
+ 	id_node->data=data;
+ 	id_node->size=size;
+ 	id_node->last_defined_macro_with_this_id=NULL;
+ 	id_node->last_use_as_a_macro_argument=NULL;
+
+ 	return id_node;
+ }
+ #endif
F diff --git a/src/syntax/identifier/identifier.h b/src/syntax/identifier/identifier.h new file mode 100644 --- /dev/null +++ b/src/syntax/identifier/identifier.h
+ #ifndef WONKY_IDENTIFIER_H
+ #define WONKY_IDENTIFIER_H WONKY_IDENTIFIER_H
+ #include <identifier.hh>
+ #include <token.h>
+
+ struct identifier /*there is only one of these per id — the canonical, shared node*/
+ {
+ 	size_t size; /* number of bytes in data */
+ 	char *data;  /* identifier spelling; NUL-termination not guaranteed — TODO confirm */
+ 	/* most recent #define token carrying this name, if any */
+ 	struct token *last_defined_macro_with_this_id;
+ 	/* most recent use of this id as a function-like macro argument */
+ 	struct functionlike_define_directive_argument *last_use_as_a_macro_argument;
+ };
+
+
+ /* Allocate a new identifier node wrapping (data,size); data is adopted, not copied. */
+ struct identifier* get_identifier(char *data,size_t size);
+ #endif
F diff --git a/src/syntax/identifier/identifier.hh b/src/syntax/identifier/identifier.hh new file mode 100644 --- /dev/null +++ b/src/syntax/identifier/identifier.hh
+ #ifndef WONKY_IDENTIFIER_HH
+ #define WONKY_IDENTIFIER_HH WONKY_IDENTIFIER_HH
+
+ /* forward declaration; full definition lives in identifier.h */
+ struct identifier;
+
+ #endif
F diff --git a/src/syntax/source_file.c b/src/syntax/source_file.c new file mode 100644 --- /dev/null +++ b/src/syntax/source_file.c
+ #ifndef WONKY_SOURCE_FILE_C
+ #define WONKY_SOURCE_FILE_C WONKY_SOURCE_FILE_C
+ #include <source_file.h>
+
+ /* include-search prefixes tried by get_source_file(); "" = current directory; NULL-terminated */
+ char *well_known_locations_base[]={"","/usr/include/","/usr/include/x86_64-linux-gnu/",NULL};
+ /*
+  * Read the entire stream *in into memory as a Source_File.
+  * Takes ownership of the FILE (closed on every path) and, on success, of name.
+  * Returns NULL when the stream cannot be sized or fully read; name is then
+  * left for the caller to dispose of.
+  *
+  * Fixes: the FILE was leaked on the early-failure paths, and the fread()
+  * result went unchecked, so a short read left uninitialised buffer bytes.
+  */
+ struct Source_File* extract_source_file(FILE *in,struct Source_Name *name)
+ {
+ 	long file_size;
+ 	struct Source_File *src;
+
+ 	/* size the stream by seeking to its end and back */
+ 	if(fseek(in,0,SEEK_END)==-1 || (file_size=ftell(in))==-1 || fseek(in,0,SEEK_SET)==-1)
+ 	{
+ 		fclose(in);
+ 		return NULL;
+ 	}
+
+ 	src=wonky_malloc(sizeof(struct Source_File));
+
+ 	src->src_name=name;
+
+ 	src->src=wonky_malloc(file_size+1);
+ 	src->src_size=file_size;
+
+ 	src->where_in_src=0;
+
+ 	src->which_column=0;
+ 	src->which_row=0;
+ 	src->is_in_the_begining_of_line=1;
+
+ 	/* keep the buffer NUL-terminated so the lexer can probe one past the text */
+ 	src->src[file_size]='\0';
+
+ 	if(fread(src->src,1,file_size,in)!=(size_t)file_size)
+ 	{
+ 		fclose(in);
+ 		wonky_free(src->src);
+ 		wonky_free(src);
+ 		return NULL;
+ 	}
+ 	fclose(in);
+ 	return src;
+ }
+ /* Split name->filename into a directory part (appended to name->base) and a
+    bare filename.  This might cause compatibility issues (assumes '/' paths) TODO */
+ void normalise_source_name(struct Source_Name *name)
+ {
+ 	size_t offset;
+ 	size_t i;
+ 	size_t last_slash;
+ 	char *hold_base;
+
+ 	/* find the index of the last '/' in the filename */
+ 	for(last_slash=offset=0;name->filename[offset];++offset)
+ 	{
+ 		if(name->filename[offset]=='/')
+ 		{
+ 			last_slash=offset;
+ 		}
+ 	}
+
+ 	/* no directory component — nothing to normalise */
+ 	if(last_slash==0)
+ 		return;
+
+ 	if(name->base==NULL)
+ 	{
+ 		offset=0;
+ 		name->base=wonky_malloc(last_slash+1);
+ 		name->base[last_slash]='\0';
+ 		/* NOTE(review): this '/' at last_slash-1 is overwritten by the copy
+ 		   loop below, so the resulting base has no trailing slash, unlike the
+ 		   else branch — confirm whether last_slash+2 bytes and a '/' at
+ 		   [last_slash] were intended */
+ 		name->base[last_slash-1]='/';
+
+ 	}else
+ 	{
+ 		/* append the new directory part after the existing base, plus "/" */
+ 		offset=gstrlen((char*)name->base);
+ 		hold_base=wonky_malloc(offset+last_slash+2);
+ 		strmv(hold_base,(char*)name->base);
+
+ 		hold_base[last_slash+offset]='/';
+ 		hold_base[last_slash+offset+1]='\0';
+ 		wonky_free((void*)name->base);
+
+ 		name->base=hold_base;
+ 	}
+
+ 	/* copy the directory characters of filename into base at the old length */
+ 	for(i=0;i<last_slash;++i)
+ 		name->base[offset+i]=name->filename[i];
+
+
+ 	++i; /* step past the '/' so i indexes the first filename character */
+ 	/*prune the filename down to the part after the last slash*/
+ 	offset=gstrlen(name->filename+i);
+ 	hold_base=wonky_malloc(offset+1);
+ 	strmv(hold_base,name->filename+i);
+ 	wonky_free(name->filename);
+ 	name->filename=hold_base;
+
+
+ }
+ /*here be dragons*/
+ /*
+  * Return the next significant character from src, optionally transparent to
+  * line splices ("\\\n"), block comments and newlines, while maintaining the
+  * row/column/token-size bookkeeping.  Returns '\0' at end of input without
+  * advancing, and '\n' without consuming it when skip_new_line is 0.
+  */
+ char src_getc(struct Source_File *src,char skip_line_splice,char skip_comments,char skip_new_line)
+ {
+ superhack: /* restart point after swallowing a splice/comment/newline */
+ 	if(src->src[src->where_in_src]=='\\' && skip_line_splice)
+ 	{
+ 		/* "\\\n" is a line splice: swallow both bytes and retry */
+ 		if(src->where_in_src < src->src_size-1 && src->src[src->where_in_src+1]=='\n')
+ 		{
+ 			src->where_in_src+=2;
+ 			++src->which_row;
+ 			src->token_size+=2;
+ 			src->which_column=0;
+ 			goto superhack;
+ 		}else
+ 		{
+ 			/* lone backslash: deliver it as an ordinary character */
+ 			++src->token_size;
+ 			++src->which_column;
+ 			++src->where_in_src;
+ 			src->is_in_the_begining_of_line=0;
+ 			return '\\';
+ 		}
+ 	}else
+ 	{
+ 		if(src->src[src->where_in_src]=='\n' && skip_new_line)
+ 		{
+ 			/* consume the newline and restart on the next line */
+ 			++src->which_row;
+ 			src->which_column=0;
+ 			src->is_in_the_begining_of_line=1;
+
+ 			++src->where_in_src;
+ 			goto superhack;
+ 		}else if(src->src[src->where_in_src]=='/' && skip_comments)
+ 		{
+ 			if(src->src[src->where_in_src+1]=='*')
+ 			{
+ 				char hold_char;
+
+
+ 				/* swallow a block comment by re-entering src_getc with
+ 				   comment skipping off; unterminated comments hit EOF */
+ 				src->where_in_src+=2;
+ 				hold_char=src_getc(src,1,0,1);
+ 				while(hold_char)
+ 				{
+ 					if(hold_char=='*')
+ 					{
+ 						hold_char=src_getc(src,1,0,1);
+ 						if(hold_char=='\0')
+ 						{
+ 							src->where_in_src=src->src_size;
+ 							return '\0';
+ 						}
+ 						else if(hold_char=='/')
+ 						{
+ 							goto superhack;
+ 						}
+ 					}else
+ 					{
+ 						hold_char=src_getc(src,1,0,1);
+ 					}
+ 				}
+ 				/* EOF inside the comment */
+ 				src->where_in_src=src->src_size;
+ 				return '\0';
+
+ 			}
+ 			/* NOTE(review): a '/' not followed by '*' falls through here
+ 			   without the ++which_column of the else branch — confirm */
+ 		}else
+ 		{
+ 			++src->which_column;
+ 		}
+ 		++src->token_size;
+ 		/* a '#' at line start keeps the begin-of-line flag for directives */
+ 		if(src->src[src->where_in_src]!='#' || src->is_in_the_begining_of_line!=1)
+ 			src->is_in_the_begining_of_line=0;
+ 		if(src->src[src->where_in_src]=='\n')
+ 		{
+ 			/* newline is reported but not consumed when skip_new_line is 0 */
+ 			return '\n';
+ 		}
+
+ 		if(src->src[src->where_in_src]=='\0')
+ 			return src->src[src->where_in_src]; /* stay parked on the NUL */
+ 		else
+ 			return src->src[src->where_in_src++];
+ 	}
+ }
+ /*
+  * Step one character back in the buffer.  Backing over a newline also
+  * rewinds the row counter; the previous line's true column is unknown
+  * here, so the column is simply reset to 0.
+  */
+ void src_ungetc(struct Source_File *src)
+ {
+ 	char backed_over=src->src[--src->where_in_src];
+
+ 	if(backed_over=='\n')
+ 	{
+ 		src->which_column=0;
+ 		--src->which_row;
+ 	}
+ }
+ /*
+  * Materialise the best (longest) match recorded by
+  * src_assimilate_into_best_token() as a token of type kw, and move the
+  * cursor to the end of that match.  The token's data points into src's
+  * buffer — it is not copied.
+  * NOTE(review): data_size/column/line/data/filename belong to the old token
+  * layout (see the commented-out struct in token.h); the new struct token has
+  * only a type member — confirm which layout this file is meant to build against.
+  */
+ struct token* src_extract_token(struct Source_File *src,enum LEXER_TYPE kw)
+ {
+ 	struct token *ret;
+ 	ret=wonky_malloc(sizeof(struct token));
+ 	ret->type=kw;
+
+ 	ret->data_size=src->best_token_size;
+ 	ret->column=src->best_token_column;
+ 	ret->line=src->best_token_line;
+ 	ret->data=src->src+src->best_token_where_in_src_start;
+ 	ret->filename=src->src_name->filename;
+ 	handle_splicing(ret); /* presumably collapses "\\\n" inside the token — confirm */
+ 	src->where_in_src=src->best_token_where_in_src_end;
+ 	src->is_in_the_begining_of_line=src->best_token_beg_line;
+ 	return ret;
+ }
+
+ /*
+  * Restart token matching at the current best-token end position.
+  * With use_src_as_base!=0 the current cursor becomes the new base;
+  * otherwise the cursor is rewound to the previously recorded best end.
+  */
+ void src_reset_token_data(struct Source_File *src,char use_src_as_base)
+ {
+ 	src->token_size=0;
+ 	src->best_token_size=0;
+ 	src->best_token_line=src->which_row;
+ 	src->best_token_column=src->which_column;
+ 	if(use_src_as_base)
+ 	{
+ 		src->best_token_where_in_src_end=src->where_in_src;
+ 	}else
+ 	{
+ 		src->where_in_src=src->best_token_where_in_src_end;
+ 	}
+ 	/* either way the next match starts at the (possibly rewound) cursor */
+ 	src->best_token_where_in_src_start=src->where_in_src;
+ }
+ /*
+  * Record the lexer's current position and size as the best (longest) match
+  * so far, so src_extract_token()/src_reset_token_data() can return to it.
+  */
+ void src_assimilate_into_best_token(struct Source_File *src)
+ {
+ 	src->best_token_where_in_src_end=src->where_in_src;
+ 	src->best_token_beg_line=src->is_in_the_begining_of_line;
+ 	src->best_token_size=src->token_size;
+ 	src->best_token_column=src->which_column;
+ 	src->best_token_line=src->which_row;
+ }
+ /* Free a Source_File, its text buffer and its owned Source_Name. */
+ void delete_source_file(struct Source_File *src)
+ {
+ 	delete_source_name(src->src_name);
+ 	wonky_free(src->src);
+ 	wonky_free(src);
+ }
+ /* Free a Source_Name and both of its owned strings.
+    NOTE(review): normalised_name is not freed here — confirm who owns it. */
+ void delete_source_name(struct Source_Name *name)
+ {
+ 	wonky_free(name->filename);
+ 	wonky_free(name->base);
+ 	wonky_free(name);
+ }
+ /*
+  * Discard everything up to and including the next newline (or end of input),
+  * leaving the cursor at the start of the following line.
+  * Fix: the old version advanced where_in_src (and which_row) unconditionally,
+  * so at end of input it stepped past the terminating NUL and out of the buffer.
+  */
+ void goto_new_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ 	char hold_char;
+ 	while( (hold_char=src_getc(src,1,1,0)) != '\0' && hold_char != '\n');
+ 	src->is_in_the_begining_of_line=1;
+ 	if(hold_char=='\n')
+ 	{
+ 		/* src_getc reports the newline without consuming it; consume it now */
+ 		++src->where_in_src;
+ 		++src->which_row;
+ 	}
+ 	src->which_column=0;
+ }
+ /*
+  * Skip to the start of the next line, like goto_new_line().
+  * Fix: the old version advanced where_in_src/which_row unconditionally, so at
+  * end of input it stepped past the terminating NUL and out of the buffer.
+  */
+ void chase_new_line(struct Source_File *src,struct Translation_Data *translation_data)
+ {
+ 	char hold_char;
+ 	for(hold_char=src_getc(src,1,1,0);hold_char!='\n' && hold_char!='\0';
+ 			hold_char=src_getc(src,1,1,0));
+
+ 	if(hold_char=='\n')
+ 	{
+ 		/* src_getc reports the newline without consuming it; consume it now */
+ 		++src->where_in_src;
+ 		++src->which_row;
+ 	}
+ 	src->is_in_the_begining_of_line=1;
+ 	src->which_column=0;
+ }
+ /*
+  * Consume consecutive "\\\n" line splices at the cursor.
+  * Returns the number of bytes skipped (two per splice).
+  */
+ size_t skip_line_splice(struct Source_File *src)
+ {
+ 	size_t skipped;
+
+ 	for(skipped=0;
+ 		src->where_in_src<src->src_size-1
+ 			&& src->src[src->where_in_src]=='\\'
+ 			&& src->src[src->where_in_src+1]=='\n';
+ 		skipped+=2)
+ 	{
+ 		src->where_in_src+=2;
+ 	}
+ 	return skipped;
+ }
+
+ /*
+  * Advance past spaces and tabs.  When skip_new_line is 0 a newline stops the
+  * scan (src_getc leaves it unconsumed); any other non-blank character is
+  * pushed back so the caller sees it next.  Stops silently at end of input.
+  */
+ void skip_white_space(struct Source_File *src,char skip_new_line)
+ {
+ 	for(;;)
+ 	{
+ 		char current=src_getc(src,1,1,skip_new_line);
+
+ 		if(current=='\0')
+ 			return ;
+ 		if(current=='\n' && !skip_new_line)
+ 			return ;
+ 		if(current!=' ' && current!='\t')
+ 		{
+ 			src_ungetc(src);
+ 			return ;
+ 		}
+ 	}
+ }
+ /*where_to_search ends in a NULL pointer*/
+ /*
+  * Try filename under each prefix in where_to_search and load the first
+  * candidate that opens and reads successfully.  Returns NULL when none does.
+  * Fix: removed the unused local `is_directory`.
+  */
+ struct Source_File* get_source_file(char *filename,char **where_to_search)
+ {
+ 	FILE *in;
+ 	char *temp_name;
+ 	struct Source_Name *name;
+ 	struct Source_File *file;
+
+ 	wonky_assert(where_to_search!=NULL);
+ 	wonky_assert(*where_to_search!=NULL);
+ 	do
+ 	{
+ 		temp_name=gstr_append(*where_to_search,filename);
+ 		in=fopen(temp_name,"r");
+ 		wonky_free(temp_name);
+ 		if(in==NULL)
+ 			continue;
+
+ 		name=get_source_name(filename,*where_to_search);
+ 		file=extract_source_file(in,name);
+ 		if(file!=NULL)
+ 		{
+ 			return file;
+ 		}else
+ 		{
+ 			/* NOTE(review): `in` is not closed here on extraction failure —
+ 			   confirm extract_source_file() closes it on every error path */
+ 			delete_source_name(name);
+ 		}
+ 	}while(*(++where_to_search));
+ 	return NULL;
+ }
+ #endif
F diff --git a/src/syntax/source_file.h b/src/syntax/source_file.h new file mode 100644 --- /dev/null +++ b/src/syntax/source_file.h
+ #ifndef WONKY_SOURCE_FILE_H
+ #define WONKY_SOURCE_FILE_H WONKY_SOURCE_FILE_H
+ #include <source_file.hh>
+ /* include-search prefixes used by get_source_file(); NULL-terminated */
+ extern char *well_known_locations_base[];
+ /* the pieces of a source path: directory base + bare filename */
+ struct Source_Name
+ {
+ 	char *filename;
+ 	char *base;
+ 	char *normalised_name;
+
+ 	size_t filename_size;
+ 	size_t base_size;
+ 	size_t normalised_name_size;
+ };
+ struct Source_Location
+ {
+ 	size_t line;
+ 	size_t column;
+ };
+ /* a whole source text plus the lexer cursor state that walks it */
+ struct Source_File
+ {
+ 	enum Source_Text_Type type;
+ 	char *src;
+ 	size_t src_size;
+
+ 	struct Source_Name *src_name;
+
+ 	/* Fix: source_file.c (same patch) reads and writes these members
+ 	   (src_getc, src_extract_token, …) but they were missing from this
+ 	   struct, so the patch could not compile.  Restored here. */
+ 	size_t where_in_src;
+ 	size_t which_row;
+ 	size_t which_column;
+ 	char is_in_the_begining_of_line;
+
+ 	/* longest-match bookkeeping for the maximal-munch lexer */
+ 	size_t token_size;
+ 	size_t best_token_size;
+ 	size_t best_token_line;
+ 	size_t best_token_column;
+ 	size_t best_token_where_in_src_start;
+ 	size_t best_token_where_in_src_end;
+ 	char best_token_beg_line;
+ };
+ /* a slice of a source text, remembering where it came from */
+ struct Source_Section
+ {
+ 	enum Source_Text_Type type;
+ 	char *src;
+ 	size_t src_size;
+
+ 	struct Source_Name *src_name;
+ 	struct Source_Location *where_in_source;
+ };
+
+ struct Source_File* extract_source_file(FILE *in,struct Source_Name *name);
+ struct Source_File* get_source_file(char *filename,char **where_to_search);
+ struct Source_Name* get_source_name(char *filename,char *base);
+ void normalise_source_name(struct Source_Name *name);
+ char src_getc(struct Source_File *src,char skip_line_splice,char skip_comments,char skip_new_line);
+ void src_ungetc(struct Source_File *src);
+ struct token* src_extract_token(struct Source_File *src,enum LEXER_TYPE kw);
+ void src_reset_token_data(struct Source_File *src,char use_src_as_base);
+ void src_assimilate_into_best_token(struct Source_File *src);
+ void chase_new_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void goto_new_line(struct Source_File *src,struct Translation_Data *translation_data);
+ void skip_white_space(struct Source_File *src,char skip_new_line);
+ size_t skip_line_splice(struct Source_File *src);
+ void delete_source_file(struct Source_File *src);
+ void delete_source_name(struct Source_Name *name);
+ #endif
F diff --git a/src/syntax/source_file.hh b/src/syntax/source_file.hh new file mode 100644 --- /dev/null +++ b/src/syntax/source_file.hh
+ #ifndef WONKY_SOURCE_FILE_HH
+ #define WONKY_SOURCE_FILE_HH WONKY_SOURCE_FILE_HH
+
+ /* forward declarations for the source-text types in source_file.h */
+ struct Source_Name;
+ struct Source_Location;
+ struct Source_File;
+ struct Source_Section;
+ /* whether a Source_* structure holds a whole file or a slice of one */
+ enum Source_Text_Type
+ {
+ 	SOURCE_TEXT_FULL_TEXT,
+ 	SOURCE_TEXT_PARTIAL_TEXT,
+ 	SOURCE_TEXT_TYPE_END
+ };
+
+ #endif
F diff --git a/src/syntax/token/token.c b/src/syntax/token/token.c new file mode 100644 --- /dev/null +++ b/src/syntax/token/token.c
+ #ifndef WONKY_TOKEN_C
+ #define WONKY_TOKEN_C WONKY_TOKEN_C
+ #include <token.h>
+
+ /* Shallow-copy a token; any pointer members remain shared with the original. */
+ struct token* copy_token(struct token *src)
+ {
+ 	struct token *duplicate=wonky_malloc(sizeof(struct token));
+
+ 	*duplicate=*src;
+ 	return duplicate;
+ }
+
+ /*
+  * Return 1 when both tokens spell identical character data, 0 otherwise.
+  * Only size and bytes are compared; token types are not examined.
+  * NOTE(review): data_size/data belong to the old token layout — confirm.
+  */
+ char compare_tokens(struct token *a,struct token *b)
+ {
+ 	size_t at;
+
+ 	if(a->data_size!=b->data_size)
+ 		return 0;
+ 	for(at=0;at<a->data_size;++at)
+ 		if(a->data[at]!=b->data[at])
+ 			return 0;
+ 	return 1;
+ }
+
+ /* Make an identifier token referring to the shared identifier node id. */
+ struct token_identifier* get_id_token(struct identifier *id,struct Source_Location *location)
+ {
+ 	struct token_identifier *id_token=wonky_malloc(sizeof(struct token_identifier));
+
+ 	id_token->type=KW_ID;
+ 	id_token->id=id;
+ 	id_token->location=location;
+
+ 	return id_token;
+ }
+ /* Make a keyword token of the given lexer type at location. */
+ struct token_keyword* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *location)
+ {
+ 	struct token_keyword *kw_token=wonky_malloc(sizeof(struct token_keyword));
+
+ 	kw_token->location=location;
+ 	kw_token->type=type;
+
+ 	return kw_token;
+ }
+ /* Make a punctuator token; the subtype is always normal for now. */
+ struct token_punctuator* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *location)
+ {
+ 	struct token_punctuator *punct=wonky_malloc(sizeof(struct token_punctuator));
+
+ 	punct->location=location;
+ 	punct->type=type;
+ 	punct->punctuator_type=PUNCTUATOR_NORMAL;/*TODO digraphs/trigraphs*/
+
+ 	return punct;
+ }
+ /*
+  * Make a numeric/character constant token from the raw spelling (data,size).
+  * NOTE(review): every case invokes an (unseen) macro `function(...)` —
+  * presumably a placeholder that will parse data into ret->constant; the
+  * data/size parameters are otherwise unused here — confirm intent.
+  */
+ struct token_constant* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size)
+ {
+ 	struct token_constant *ret;
+ 	ret=wonky_malloc(sizeof(struct token_constant));
+ 	ret->location=location;
+ 	ret->type=bare_type;/*TODO*/
+
+ 	/* dispatch on the exact constant flavour determined by the lexer */
+ 	switch(bare_type)
+ 	{
+
+ 		case KW_HEXADECIMAL_CONSTANT:
+ 			function(KW_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_DECIMAL_CONSTANT:
+ 			function(KW_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_OCTAL_CONSTANT:
+ 			function(KW_OCTAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_DECIMAL_CONSTANT:
+ 			function(KW_UNSIGNED_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_OCTAL_CONSTANT:
+ 			function(KW_UNSIGNED_OCTAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_HEXADECIMAL_CONSTANT:
+ 			function(KW_UNSIGNED_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_LONG_HEXADECIMAL_CONSTANT:
+ 			function(KW_UNSIGNED_LONG_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_LONG_OCTAL_CONSTANT:
+ 			function(KW_UNSIGNED_LONG_OCTAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_LONG_DECIMAL_CONSTANT:
+ 			function(KW_UNSIGNED_LONG_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_LONG_LONG_DECIMAL_CONSTANT:
+ 			function(KW_UNSIGNED_LONG_LONG_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_LONG_LONG_HEXADECIMAL_CONSTANT:
+ 			function(KW_UNSIGNED_LONG_LONG_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_UNSIGNED_LONG_LONG_OCTAL_CONSTANT:
+ 			function(KW_UNSIGNED_LONG_LONG_OCTAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_HEXADECIMAL_CONSTANT:
+ 			function(KW_LONG_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_OCTAL_CONSTANT:
+ 			function(KW_LONG_OCTAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_DECIMAL_CONSTANT:
+ 			function(KW_LONG_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_LONG_HEXADECIMAL_CONSTANT:
+ 			function(KW_LONG_LONG_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_LONG_OCTAL_CONSTANT:
+ 			function(KW_LONG_LONG_OCTAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_LONG_DECIMAL_CONSTANT:
+ 			function(KW_LONG_LONG_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_DOUBLE_DECIMAL_CONSTANT:
+ 			function(KW_DOUBLE_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_DOUBLE_DECIMAL_CONSTANT:
+ 			function(KW_LONG_DOUBLE_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_FLOAT_DECIMAL_CONSTANT:
+ 			function(KW_FLOAT_DECIMAL_CONSTANT);
+ 			break;
+ 		case KW_DOUBLE_HEXADECIMAL_CONSTANT:
+ 			function(KW_DOUBLE_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT:
+ 			function(KW_LONG_DOUBLE_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_FLOAT_HEXADECIMAL_CONSTANT:
+ 			function(KW_FLOAT_HEXADECIMAL_CONSTANT);
+ 			break;
+ 		case KW_CHAR_CONSTANT:
+ 			function(KW_CHAR_CONSTANT);
+ 			break;
+ 		case KW_WIDE_CHAR_CONSTANT:
+ 			function(KW_WIDE_CHAR_CONSTANT);
+ 			break;
+ 		default:
+ 			/* the lexer should never hand us any other LEXER_TYPE */
+ 			wonky_assert(SHOULD_NOT_REACH_HERE);
+ 	}
+
+ 	return ret;
+ }
+ /*
+  * Make a string-literal token; data is adopted, not copied.
+  * NOTE(review): string_type is left unset, exactly as before — confirm
+  * whether STRING_TOKEN_NORMAL/WIDE should be derived from bare_type here.
+  */
+ struct token_string* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size)
+ {
+ 	struct token_string *str_token=wonky_malloc(sizeof(struct token_string));
+
+ 	str_token->location=location;
+ 	str_token->type=bare_type;
+ 	str_token->data=data;
+ 	str_token->size=size;
+
+ 	return str_token;
+ }
+ /*
+  * Make an include-directive token owning the queue of tokens that form the
+  * directive's body.
+  * Fix: the allocation used sizeof(struct token_string) — the wrong struct —
+  * under-allocating the token_include_directive.
+  * NOTE(review): ret->type is left unset as in the original — confirm which
+  * LEXER_TYPE an include directive should carry.
+  */
+ struct token_include_directive* get_include_directive_token(struct Source_Location *location,struct Queue *tokens)
+ {
+ 	struct token_include_directive *ret;
+ 	ret=wonky_malloc(sizeof(struct token_include_directive));
+ 	ret->location=location;
+ 	ret->tokens=tokens;
+ 	return ret;
+ }
+ /*
+  * Make an #if directive token: the controlling expression plus the queue
+  * positions to resume from for the true branch, the false branch and the
+  * end of the whole directive.
+  * Fix: the original body was left unfinished ("ret->") and returned nothing.
+  * NOTE(review): ret->type is not set — confirm the intended LEXER_TYPE.
+  */
+ struct token_if_directive* get_if_directive_token(struct Source_Location *location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive)
+ {
+ 	struct token_if_directive *ret;
+ 	ret=wonky_malloc(sizeof(struct token_if_directive));
+ 	ret->location=location;
+ 	ret->controlling_expression=controlling_tokens;
+ 	ret->if_true=if_true;
+ 	ret->if_false=if_false;
+ 	ret->end_of_if_directive=end_of_if_directive;
+
+ 	return ret;
+ }
+
+ /*
+  * Constructors declared but not yet implemented in this file.
+  * Fix: get_ifdef_directive_token was declared twice verbatim; the second
+  * copy was evidently meant to be the ifndef variant — confirm.
+  */
+ struct token_ifdef_directive* get_ifdef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
+
+ struct token_ifndef_directive* get_ifndef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_undefined,struct Queue_Node *if_defined,struct Queue_Node *end_of_ifndef_directive);
+
+ struct token_normal_define_directive* get_normal_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *replacement_tokens);
+
+ struct token_functionlike_define_directive* get_functionlike_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *argument_id_list_tokens,struct Queue *replacement_tokens);
+
+ struct token_undef_directive* get_undef_directive_token(struct Source_Location *location,struct identifier *id);
+ struct token_line_directive* get_line_directive_token(struct Source_Location *location,struct Source_Location *new_location);
+ struct token_error_directive* get_error_directive_token(struct Source_Location *location,struct token_string *error_message);
+ struct token_pragma_directive* get_pragma_directive(struct Source_Location *location,enum Pragma_Type type);
+ struct token_defined_unary_operator* get_defined_unary_operator(struct Source_Location *location,struct identifier *id);
+ #endif
F diff --git a/src/syntax/token/token.h b/src/syntax/token/token.h new file mode 100644 --- /dev/null +++ b/src/syntax/token/token.h
+ #ifndef WONKY_TOKEN_H
+ #define WONKY_TOKEN_H WONKY_TOKEN_H
+ #include <token.hh>
+
+ #include <automata.h>
+ #include <constant.h>
+ #include <wonky_malloc.h>
+ #include <wonky_assert.h>
+ #include <source_file.h>
+
+ /* Base token: every token_* struct below starts with the same enum member,
+    so a struct token* can be used as a discriminated-union handle. */
+ struct token
+ {
+ 	enum LEXER_TYPE type;
+ };
+ struct token_identifier
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct identifier *id; /* shared canonical identifier node */
+ };
+ struct token_keyword
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ };
+ struct token_punctuator
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	enum Punctuator_Token_Type punctuator_type; /* normal / digraph / trigraph */
+ };
+ struct token_constant
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct Constant *constant; /* parsed value; see constant.h */
+ };
+ struct token_string
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	enum String_Token_Type string_type; /* normal or wide */
+ 	size_t size;
+ 	char *data; /* adopted spelling, not copied */
+ };
+ /* #include — holds the tokens making up the directive body */
+ struct token_include_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct Queue *tokens;
+ };
+ /* #if — queue positions to resume lexing from for each branch */
+ struct token_if_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct Queue *controlling_expression;
+ 	struct Queue_Node *if_true;
+ 	struct Queue_Node *if_false;
+ 	struct Queue_Node *end_of_if_directive;
+ };
+ struct token_ifdef_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct identifier *id;
+ 	struct Queue_Node *if_defined;
+ 	struct Queue_Node *if_undefined;
+ 	struct Queue_Node *end_of_ifdef_directive;
+ };
+ struct token_ifndef_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct identifier *id;
+ 	struct Queue_Node *if_undefined;
+ 	struct Queue_Node *if_defined;
+ 	struct Queue_Node *end_of_ifndef_directive;
+ };
+ /* #define without parameters */
+ struct token_normal_define_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct identifier *id;
+ 	struct Queue *replacement_tokens;
+ 	struct Translation_Unit *the_last_place_this_macro_was_defined;
+ };
+ /* one formal parameter of a function-like macro */
+ struct functionlike_define_directive_argument
+ {
+ 	struct token_functionlike_define_directive *belongs_to;
+ 	struct Queue_Node *first_in_argument_substitution_tokens;
+ 	size_t number_of_substitution_tokens;
+ };
+ /* #define with a parameter list */
+ struct token_functionlike_define_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct identifier *id;
+ 	struct Queue *arguments;
+ 	struct Queue *replacement_tokens;
+ 	struct Translation_Unit *the_last_place_this_macro_was_defined;
+ };
+ struct token_undef_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct identifier *id;
+ };
+ /* #line — remaps subsequent real locations onto new_location */
+ struct token_line_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *directive_location;
+ 	struct Source_Location *real_location;
+ 	struct Source_Location *new_location;
+ };
+ struct token_error_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	struct token_string *error_message;
+ };
+ struct token_pragma_directive
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Location *location;
+ 	enum Pragma_Type pragma_type;
+ };
+ /* the preprocessor "defined id" operator inside #if expressions */
+ struct token_defined_unary_operator
+ {
+ 	enum LEXER_TYPE type;
+ };
+ /* a stretch of source text not yet run through the lexer */
+ struct token_unlexed_source_part
+ {
+ 	enum LEXER_TYPE type;
+ 	struct Source_Section *section;
+ };
+
+ /*
+  * OLD TOKEN STRUCT — kept for reference while source_file.c/token.c still
+  * touch data/data_size/line/column/filename; remove once migrated.
+ struct token
+ {
+ 	enum LEXER_TYPE type;
+ 	size_t data_size;
+ 	char *data;
+ 	size_t line,column;
+ 	const char *filename;
+ };
+ */
+ */
+ /* token utilities */
+ struct token* copy_token(struct token *src);
+ void handle_splicing(struct token *word);
+ char compare_tokens(struct token *a,struct token *b);
+
+
+ /* token constructors — one per token_* struct above */
+ struct token_identifier* get_id_token(struct identifier *id,struct Source_Location *location);
+ struct token_keyword* get_keyword_token(enum LEXER_TYPE type,struct Source_Location *location);
+ struct token_punctuator* get_punctuator_token(enum LEXER_TYPE type,struct Source_Location *location);
+ struct token_constant* get_constant_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size);
+ struct token_string* get_string_token(enum LEXER_TYPE bare_type,struct Source_Location *location,char *data,size_t size);
+ struct token_include_directive* get_include_directive_token(struct Source_Location *location,struct Queue *tokens);
+ struct token_if_directive* get_if_directive_token(struct Source_Location *location,struct Queue *controlling_tokens,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
+
+ struct token_ifdef_directive* get_ifdef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_true,struct Queue_Node *if_false,struct Queue_Node *end_of_if_directive);
+
+ /* Fix: this prototype was a verbatim duplicate of get_ifdef_directive_token;
+    it was evidently meant to be the ifndef variant — confirm. */
+ struct token_ifndef_directive* get_ifndef_directive_token(struct Source_Location *location,struct identifier *id,struct Queue_Node *if_undefined,struct Queue_Node *if_defined,struct Queue_Node *end_of_ifndef_directive);
+
+ struct token_normal_define_directive* get_normal_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *replacement_tokens);
+
+ struct token_functionlike_define_directive* get_functionlike_define_directive_token(struct Source_Location *location,struct identifier *id,struct Queue *argument_id_list_tokens,struct Queue *replacement_tokens);
+
+ struct token_undef_directive* get_undef_directive_token(struct Source_Location *location,struct identifier *id);
+ struct token_line_directive* get_line_directive_token(struct Source_Location *location,struct Source_Location *new_location);
+ struct token_error_directive* get_error_directive_token(struct Source_Location *location,struct token_string *error_message);
+ struct token_pragma_directive* get_pragma_directive(struct Source_Location *location,enum Pragma_Type type);
+ struct token_defined_unary_operator* get_defined_unary_operator(struct Source_Location *location,struct identifier *id);
+
+ #endif
F diff --git a/src/syntax/token/token.hh b/src/syntax/token/token.hh new file mode 100644 --- /dev/null +++ b/src/syntax/token/token.hh
+ #ifndef WONKY_TOKEN_HH
+ #define WONKY_TOKEN_HH WONKY_TOKEN_HH
+
+ /* forward declarations for every token flavour defined in token.h */
+ struct token;
+ struct identifier;
+ struct token_identifier;
+ struct token_keyword;
+ struct token_punctuator;
+ struct token_constant;
+ struct token_string;
+ struct token_include_directive;
+ struct token_if_directive;
+ struct token_ifdef_directive;
+ struct token_ifndef_directive;
+ struct token_normal_define_directive;
+ struct functionlike_define_directive_argument;
+ struct token_functionlike_define_directive;
+ struct token_undef_directive;
+ struct token_line_directive;
+ struct token_error_directive;
+ struct token_pragma_directive;
+ struct token_defined_unary_operator;
+
+ /* how a punctuator was spelled in the source */
+ enum Punctuator_Token_Type
+ {
+ 	PUNCTUATOR_NORMAL,
+ 	PUNCTUATOR_DIGRAPH,
+ 	PUNCTUATOR_TRIGRAPH,
+ 	PUNCTUATOR_TYPE_END
+ };
+ /* numeric base of a constant token's spelling */
+ enum Constant_Token_Encoding
+ {
+ 	CONSTANT_TOKEN_HEXADECIMAL,
+ 	CONSTANT_TOKEN_DECIMAL,
+ 	CONSTANT_TOKEN_OCTAL,
+ 	CONSTANT_TOKEN_TYPE_END
+ };
+ /* length suffix of an integer constant (none / L / LL) */
+ enum Constant_Token_Specifier
+ {
+ 	CONSTANT_TOKEN_SPECIFIER_NONE,
+ 	CONSTANT_TOKEN_SPECIFIER_LONG,
+ 	CONSTANT_TOKEN_SPECIFIER_LONG_LONG,
+ 	CONSTANT_TOKEN_SPECIFIER_END
+
+ };
+ enum Constant_Token_Signedness
+ {
+ 	CONSTANT_TOKEN_SIGNED,
+ 	CONSTANT_TOKEN_UNSIGNED,
+ 	CONSTANT_TOKEN_SIGNEDNESS_END
+ };
+ enum String_Token_Type
+ {
+ 	STRING_TOKEN_NORMAL,
+ 	STRING_TOKEN_WIDE,
+ 	STRING_TOKEN_TYPE_END
+ };
+ /* no pragmas are recognised yet */
+ enum Pragma_Type
+ {
+ 	PRAGMA_TYPE_END
+ };
+
+ #endif
F diff --git a/src/wonky.h b/src/wonky.h --- a/src/wonky.h +++ b/src/wonky.h
#include <ast.h>
#include <denoted.h>
#include <lexer.h>
- #include <location.h>
+ #include <memory_location.h>
#include <parse_declaration.h>
#include <parse_expression.h>
#include <parse_statement.h>