#include <stdio.h>	/* NOTE: the original <...> header names were lost; these are the standard headers this file uses */
#include <stdlib.h>
#include <string.h>

/* #define STR_SIZE_LIMIT 65536 */
#define STR_SIZE_LIMIT 5458264

#define IMP
#define WANT_BUILD
#define WANT_CSTR
#define WANT_STR
#define WANT_MAP
#define WANT_TOKENIZER
#define WANT_DYN_ARR
#define WANT_PATH
#define WANT_ENV
#define WANT_SLOP
#define WANT_CMD
#include "./src/lib.h"

#include <stdbool.h>	/* assumed: the two original header names here were lost as well */
#include <stddef.h>

/*
# define DIE(f, e) \
	(f); \
	if ( (e) != ERR_OK ) {\
		fprintf(stderr, \
			"Error while running `" #f "`: %s\n", \
			err_to_name[(e)]); \
		goto exit_err; \
	}

# define PPP(f) \
	printf("PRE: `" #f "`\n"); \
	f; \
	printf("POST: `" #f "`\n");
*/

int main(int argc, char *argv[])
{
	enum err err = ERR_OK;
	struct cmd cmd = {0};

	cmd = cmd_create_ns("/bin/echo", &err);
	cmd_append_arg_ns(&cmd, "This is a test of execve", &err);
	cmd_exec(&cmd, &err);
	cmd_destroy(&cmd, &err);
	if ( err != ERR_OK ) {
		cmd_destroy(&cmd, NULL);
		fprintf(stderr, "Failed cmd: %s\n", err_to_name[err]);
		return 1;
	}

	/*
	bool was_rebuild = false;
	struct dyn_arr dirs = {0};
	size_t i = 0;

	was_rebuild = build_go_rebuild_yourself(__FILE__, &err);
	if ( was_rebuild == true ) {
		return 0;
	}

	dirs = dir_list_with_ext("./src", ".c", &err);
	free(dirs);
	*/

	UNUSED(argc);
	UNUSED(argv);
	return 0;
}

bool skip_token(struct tokenizer *tkn, char c);

enum c_token_type {
	_TT = TK_LAST,
	TK_PP_DEF,
	TK_PP_INC
};

struct pp_def {
	struct str name;
	struct str val;
};

# define PP_DEF_DESTROY(Var) \
	if ( (Var) != NULL ) { \
		STR_DESTROY((Var)->name); \
		STR_DESTROY((Var)->val); \
		LIB_FREE((Var)); \
	}

struct var_decl {
	bool is_ptr;
	struct str type;
	struct str name;
};

# define VAR_DECL_DESTROY(Var) \
	if ( (Var) != NULL ) { \
		STR_DESTROY((Var)->type); \
		STR_DESTROY((Var)->name); \
	}

struct func_decl {
	struct str ret_type;
	struct str name;
	struct {
		struct var_decl *data;
		u64 size;
		u64 cap;
	} args;
};

void func_decl_destroy(struct func_decl *fd);
void func_decl_destroy(struct func_decl *fd)
{
	u64 i = 0;

	if ( fd == NULL ) return;

	STR_DESTROY(fd->ret_type);
	STR_DESTROY(fd->name);
	for ( i = 0; i < fd->args.size; ++i ) {
		STR_DESTROY(fd->args.data[i].type);
		STR_DESTROY(fd->args.data[i].name);
	}
	LIB_FREE(fd->args.data);
	LIB_FREE(fd);
}

bool tkn_expect(struct tokenizer *tkn, enum token_type type, struct token *out_tk, enum err *out_err);
bool tkn_expect_id(struct tokenizer *tkn, const char *cstr, struct token *out_tk, enum err *out_err);
bool tkn_parse_function(struct tokenizer *tkn);
bool run_function(struct str str);
bool tkn_parse_pp_directive(struct tokenizer *tkn, struct token *out_tk, enum err *out_err);
bool tkn_parse_var(struct tokenizer *tkn, struct token *out_tk, enum err *out_err);
bool tkn_parse_decl(struct tokenizer *tkn, struct token *out_tk, enum err *out_err);

bool is_space(char c);
bool is_space(char c)
{
	return (c == ' ')
	    || (c == '\r')
	    || (c == '\n')
	    || (c == '\t')
	    || (c == '\v');
}

int main2(int argc, char *argv[]);
int main2(int argc, char *argv[])
{
	enum err err = ERR_OK;
	struct path src_path = {0};
	struct file f = {0};
	struct str code = {0};
	struct tokenizer tkn = {0};
	struct tokenizer_options tkn_opts = {0};
	struct token tk = {0};
	struct dyn_arr tk_da = {0};

	tk_da = dyn_arr_create(sizeof(struct token), &err);
	src_path = path_from_cstr_ns("./first.c", &err);
	f = path_file_read_all(&src_path, &err);
	code = str_from_cstr((char*)f.data, f.size, &err);

	tkn_opts.skip_token = skip_token;
	tkn = tokenizer_create(code, src_path, &tkn_opts, &err);

	tk = tokenizer_next_token(&tkn, &err);
	do {
		if ( err != ERR_OK ) {
			fprintf(stderr, "ERROR: Failed tokenizing `%.*s`: %s\n",
				(int) src_path.size, src_path.data,
				err_to_name[err]);
			goto error_exit;
		}
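		/* Dispatch on the current token: '#' starts a preprocessor
		 * directive, an identifier starts a declaration, a slash
		 * followed by an asterisk opens a block comment that is
		 * skipped, and newlines are ignored. */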
		switch ( tk.type ) {
		case TK_POUND: {
			if ( ! tkn_parse_pp_directive(&tkn, NULL, &err) )
				goto error_exit;
		} break;
		case TK_ID:
			if ( ! tkn_parse_decl(&tkn, NULL, &err) )
				goto error_exit;
			/* fall through */
		case TK_NL:
			break;
		case TK_SLASH: {
			if ( ! tkn_expect(&tkn, TK_ASTERISK, NULL, &err) )
				goto error_exit;
			while ( tk.type != TK_ASTERISK )
				tk = tokenizer_next_token(&tkn, &err);
			if ( ! tkn_expect(&tkn, TK_SLASH, NULL, &err) )
				goto error_exit;
		} break;
		default:
			fprintf(stderr, "%s ERROR: Invalid Token `%s`\n",
				tokenizer_token_loc_temp(&tkn, &tk, TLF_VIM, NULL),
				token_to_cstr(tk.type));
			goto error_exit;
		}

		tk = tokenizer_next_token(&tkn, &err);
	} while ( tk.type != TK_EOF );

	printf("%s\n", err_to_name[err]);

error_exit:
	if ( f.data != NULL ) free(f.data);
	if ( tk_da.data != NULL ) dyn_arr_destroy(&tk_da, NULL);
	(void) argc;
	(void) argv;
	return 0;
}

bool skip_token(struct tokenizer *tkn, char c)
{
	UNUSED(tkn);
	return (c == ' ') || (c == '\r') || (c == '\t');
}

bool tkn_expect(struct tokenizer *tkn, enum token_type type, struct token *out_tk, enum err *out_err)
{
	enum err err = ERR_OK;
	enum err *perr = &err;

	LIB_ARG_IF_NOT_NULL_MUST_BE(out_err, ERR_OK, false);

	if ( tokenizer_is_next(tkn, type, out_tk, perr) == false ) {
		struct token tk = {0};
		tk = tokenizer_next_token(tkn, perr);
		if ( err != ERR_OK ) {
			fprintf(stderr, "Failed to get next token: %s\n", err_to_name[err]);
			return false;
		}
		fprintf(stderr, "%s ERROR: Got wrong token, expected: %s, got: %s\n",
			tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
			token_to_cstr(type), token_to_cstr(tk.type));
		return false;
	}
	return true;
}

bool tkn_expect_id(struct tokenizer *tkn, const char *cstr, struct token *out_tk, enum err *out_err)
{
	enum err err = ERR_OK;
	struct str str = {0};

	LIB_ARG_IF_NOT_NULL_MUST_BE(out_err, ERR_OK, false);
	LIB_ARG_MUST_NOT_BE_NULL(cstr, out_err, false);

	str = str_from_cstr_ns(cstr, &err);
	if ( tokenizer_is_next_id(tkn, str, out_tk, &err) == false ) {
		struct token tk = {0};
		tk = tokenizer_next_token(tkn, &err);
		if ( err != ERR_OK ) {
			fprintf(stderr, "Failed to get next token: %s\n", err_to_name[err]);
			return false;
		}
		if ( tk.type != TK_ID ) {
			fprintf(stderr, "%s ERROR: Got wrong token, expected: TK_ID, got: %s\n",
				tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
				token_to_cstr(tk.type));
			return false;
		}
		fprintf(stderr, "%s ERROR: Got wrong id, expected: %s, got: %.*s\n",
			tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
			cstr, (int) tk.string.size, tk.string.data);
		return false;
	}
	return true;
}

bool tkn_parse_pp_directive(struct tokenizer *tkn, struct token *out_tk, enum err *out_err)
{
	struct token tk = {0};
	enum err err = ERR_OK;
	enum err *perr = &err;

	LIB_ARG_IF_NOT_NULL_MUST_BE(out_err, ERR_OK, false);
	LIB_ARG_MUST_NOT_BE_NULL(tkn, out_err, false);

	if ( out_err != NULL ) {
		perr = out_err;
	}

	tk = tokenizer_next_token(tkn, perr);
	if ( *perr != ERR_OK ) {
		fprintf(stderr, "Failed to get next token: %s\n", err_to_name[*perr]);
		return false;
	}
	if ( tk.type != TK_ID ) {
		fprintf(stderr, "%s Got wrong token, expected: TK_ID, got: %s\n",
			tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
			token_to_cstr(tk.type));
		return false;
	}

	if ( str_eq_cstr(tk.string, "define", 6) == true ) {
		struct token tk_def = {0};
		struct pp_def *def = NULL;

		tk_def.type = (enum token_type) TK_PP_DEF;

		def = malloc(sizeof(struct pp_def));
		if ( def == NULL ) {
			*perr = ERR_FAILED_ALLOC;
			goto def_exit_err;
		}
		memset(def, 0, sizeof(struct pp_def));
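		/* `#define NAME [value]`: read the macro name, then an
		 * optional value (identifier, string, or number literal)
		 * up to the end of the line. */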
		if ( ! tkn_expect(tkn, TK_ID, &tk, perr) ) goto def_exit_err;
		tk_def.loc_start = tk.loc_start;
		def->name = str_dup(tk.string);

		tk = tokenizer_next_token(tkn, &err);
		if ( err != ERR_OK ) {
			fprintf(stderr, "Failed to get next token: %s\n", err_to_name[err]);
			goto def_exit_err;
		}
		switch ( tk.type ) {
		case TK_ID:
		case TK_STR_LIT:
		case TK_NUM_LIT:
			break;
		case TK_NL:
			goto def_wout_value;
		default:
			fprintf(stderr, "%s Got wrong token, expected:"
				" TK_ID/TK_STR_LIT/TK_NUM_LIT, got: %s\n",
				tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
				token_to_cstr(tk.type));
			goto def_exit_err;
		}
		def->val = str_dup(tk.string);
		tk_def.loc_end = tk.loc_end;

		if ( ! tkn_expect(tkn, TK_NL, NULL, perr) ) goto def_exit_err;

def_wout_value:
		tk_def.extra = def;
		if ( out_tk != NULL ) {
			*out_tk = tk_def;
		} else {
			PP_DEF_DESTROY(def);
		}
		return true;

def_exit_err:
		PP_DEF_DESTROY(def);
		return false;
	}

	if ( str_eq_cstr(tk.string, "include", 7) == true ) {
		struct token tk_inc = {0};

		tk_inc.type = (enum token_type) TK_PP_INC;

		tk = tokenizer_next_token(tkn, &err);
		if ( err != ERR_OK ) {
			fprintf(stderr, "Failed to get next token: %s\n", err_to_name[err]);
			return false;
		}

		if ( tk.type == TK_STR_LIT ) {
			tk_inc.loc_start = tk.loc_start;
			tk_inc.loc_end = tk.loc_end;
			tk_inc.string = str_dup(tk.string);
			goto inc_str_lit;
		} else if ( tk.type != TK_L_ANG_BRACKET ) {
			fprintf(stderr, "%s Got wrong token, expected:"
				" TK_STR_LIT/TK_L_ANG_BRACKET, got: %s\n",
				tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
				token_to_cstr(tk.type));
			goto inc_exit_err;
		}

		if ( ! tkn_expect(tkn, TK_ID, &tk, perr) ) goto inc_exit_err;
		tk_inc.loc_start = tk.loc_start;
		tk_inc.loc_end = tk.loc_end;
		tk_inc.string = str_dup(tk.string);

		if ( ! tkn_expect(tkn, TK_DOT, NULL, perr) ) goto inc_exit_err;
		if ( ! tkn_expect_id(tkn, "h", NULL, perr) ) goto inc_exit_err;
		if ( ! tkn_expect(tkn, TK_R_ANG_BRACKET, NULL, perr) ) goto inc_exit_err;
		if ( ! tkn_expect(tkn, TK_NL, NULL, perr) ) goto inc_exit_err;

inc_str_lit:
		if ( out_tk != NULL ) {
			*out_tk = tk_inc;
		} else {
			STR_DESTROY(tk_inc.string);
		}
		return true;

inc_exit_err:
		STR_DESTROY(tk_inc.string);
		return false;
	}

	if ( str_eq_cstr(tk.string, "if", 2) == true ) {
		return true;
	}
	if ( str_eq_cstr(tk.string, "ifdef", 5) == true ) {
		return true;
	}
	if ( str_eq_cstr(tk.string, "ifndef", 6) == true ) {
		return true;
	}

	*perr = ERR_GENERAL_ERROR;
	fprintf(stderr, "%s ERROR: Invalid preprocessor directive `%.*s`\n",
		tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
		(int) tk.string.size, tk.string.data);
	return false;
}

bool tkn_parse_var(struct tokenizer *tkn, struct token *out_tk, enum err *out_err)
{
	enum err err = ERR_OK;
	enum err *perr = &err;
	struct token tk = {0};
	struct var_decl *vd = NULL;
	struct {
		char *data;
		u64 size;
		u64 cap;
	} type = {0};

	LIB_ARG_IF_NOT_NULL_MUST_BE(out_err, ERR_OK, false);
	LIB_ARG_MUST_NOT_BE_NULL(tkn, out_err, false);

	if ( out_err != NULL ) {
		perr = out_err;
	}

	vd = malloc(sizeof(*vd));
	if ( vd == NULL ) {
		*perr = ERR_FAILED_ALLOC;
		goto exit_err;
	}
	memset(vd, 0, sizeof(*vd));
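	/* Expect the base type identifier first, then consume '*', further
	 * identifiers, ',' and ';' tokens; the remainder of the declaration
	 * parsing is still the TODO below. */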
	if ( ! tkn_expect(tkn, TK_ID, &tk, perr) ) goto exit_err;

	DA_APPEND_DATA(type, tk.string.data, tk.string.size, perr);
	DA_APPEND(type, ' ', perr);

	while ( true ) {
		/* bool is_name = false; */
		/* enum token_type ntt = tokenizer_peek_token_type(tkn, NULL); */
		/* is_name = ( ntt == TK_COMMA ) || ( ntt == TK_SEMICOLON )
		          || ( ntt == TK_ASTERISK ) || ( ntt == TK_EQUAL ); */

		tk = tokenizer_next_token(tkn, NULL);
		switch ( tk.type ) {
		case TK_ID:        break;
		case TK_ASTERISK:  break;
		case TK_COMMA:     break;
		case TK_SEMICOLON: break;
		default:
			fprintf(stderr, "%s Got wrong token, expected:"
				" TK_ID/TK_ASTERISK/TK_COMMA/TK_SEMICOLON, got: %s\n",
				tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
				token_to_cstr(tk.type));
			goto exit_err;
		}
	}

	TODO("Rest of var decl!");

	if ( ! tkn_expect(tkn, TK_ID, &tk, perr) ) goto exit_err;
	vd->name = str_dup(tk.string);
	if ( ! tkn_expect(tkn, TK_COMMA, NULL, perr) ) goto exit_err;

	if ( out_tk != NULL ) {
		*out_tk = tk;
	}

exit_err:
	LIB_FREE(vd);
	LIB_FREE(type.data);
	if ( *perr == ERR_OK ) *perr = ERR_GENERAL_ERROR;
	return false;
}

bool tkn_parse_decl(struct tokenizer *tkn, struct token *out_tk, enum err *out_err)
{
	struct token tk = {0};
	struct token tk_type = {0};
	enum err err = ERR_OK;
	enum err *perr = &err;

	LIB_ARG_IF_NOT_NULL_MUST_BE(out_err, ERR_OK, false);
	LIB_ARG_MUST_NOT_BE_NULL(tkn, out_err, false);

	if ( out_err != NULL ) {
		perr = out_err;
	}

	tk_type = tkn->last;
	if ( ! tkn_expect(tkn, TK_ID, &tk, perr) ) return false;

	switch ( tokenizer_next_token_type(tkn, perr) ) {
	case TK_L_BRACES: {
		struct token tk_fd = {0};
		struct func_decl *fd = NULL;

		fd = malloc(sizeof(struct func_decl));
		if ( fd == NULL ) {
			*perr = ERR_FAILED_ALLOC;
			goto func_decl_exit_err;
		}
		memset(fd, 0, sizeof(*fd));

		fd->args.cap = 32;
		fd->args.data = malloc(sizeof(*fd->args.data) * fd->args.cap);
		if ( fd->args.data == NULL ) {
			*perr = ERR_FAILED_ALLOC;
			goto func_decl_exit_err;
		}
		memset(fd->args.data, 0, sizeof(*fd->args.data) * fd->args.cap);

		tk_fd.loc_start = tk_type.loc_start;
		fd->ret_type = str_dup(tk_type.string);
		fd->name = str_dup(tk.string);

		while ( true ) {
			struct token tk_vd = {0};
			struct var_decl *vd = NULL;

			tkn_parse_var(tkn, &tk_vd, perr);
			if ( *perr != ERR_OK ) goto func_decl_exit_err;
			vd = tk_vd.extra;
			DA_APPEND(fd->args, *vd, perr);
		}

		if ( out_tk != NULL ) {
			*out_tk = tk_fd;
		} else {
			func_decl_destroy(fd);
		}
		return true;

func_decl_exit_err:
		func_decl_destroy(fd);
		return false;

		/*
		struct func_decl {
			struct str ret_type;
			struct str name;
			struct {
				struct str *data;
				u64 size;
				u64 cap;
			} args;
		};
		*/
	} break;
	case TK_INVALID:
		fprintf(stderr, "Failed to get next token: %s\n", err_to_name[*perr]);
		return false;
	default:
		fprintf(stderr, "%s Got wrong token, expected: TK_L_BRACES, got: %s\n",
			tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
			token_to_cstr(tk.type));
	}

	/*
	tk = tokenizer_next_token(tkn, perr);
	if ( *perr != ERR_OK ) {
		fprintf(stderr, "Failed to get next token: %s\n", err_to_name[*perr]);
		return false;
	}
	if ( tk.type != TK_ID ) {
		fprintf(stderr, "%s Got wrong token, expected: TK_ID, got: %s\n",
			tokenizer_token_loc_temp(tkn, &tk, TLF_VIM, NULL),
			token_to_cstr(tk.type));
		return false;
	}
	*/

	UNUSED(tk_type);
	UNUSED(out_tk);
	*perr = ERR_OK;
	fprintf(stderr, "%s ERROR: Invalid declaration id `%.*s`\n",
		tokenizer_token_loc_temp(tkn, &tkn->last, TLF_VIM, NULL),
		(int) tk.string.size, tk.string.data);
	return false;
}

/* int main(void) { } */