From 7117c4a5ee4ebcdaa8ff692b9cc18fb87e6d8038 Mon Sep 17 00:00:00 2001
From: ridiculousfish
Date: Wed, 21 Nov 2012 22:14:28 -0800
Subject: [PATCH] Remove tok_destroy

---
 builtin_commandline.cpp |  1 -
 complete.cpp            |  2 --
 fish_indent.cpp         |  2 --
 fish_tests.cpp          |  2 --
 highlight.cpp           |  2 --
 history.cpp             |  1 -
 parse_util.cpp          |  7 -------
 parser.cpp              | 11 -----------
 reader.cpp              |  2 --
 tokenizer.cpp           |  8 --------
 tokenizer.h             |  6 ------
 11 files changed, 44 deletions(-)

diff --git a/builtin_commandline.cpp b/builtin_commandline.cpp
index 07dc2c969..4b81dc6dd 100644
--- a/builtin_commandline.cpp
+++ b/builtin_commandline.cpp
@@ -177,7 +177,6 @@ static void write_part(const wchar_t *begin,

         stdout_buffer.append(out);
         free(buff);
-        tok_destroy(&tok);
     }
     else
     {
diff --git a/complete.cpp b/complete.cpp
index c21beafab..af70b24dd 100644
--- a/complete.cpp
+++ b/complete.cpp
@@ -1893,8 +1893,6 @@ void complete(const wcstring &cmd, std::vector<completion_t> &comps, complete_ty

     }

-    tok_destroy(&tok);
-
     /*
       Get the string to complete
     */
diff --git a/fish_indent.cpp b/fish_indent.cpp
index 1a7df1244..db48679b4 100644
--- a/fish_indent.cpp
+++ b/fish_indent.cpp
@@ -257,8 +257,6 @@ static int indent(wcstring &out, const wcstring &in, int flags)

     }

-    tok_destroy(&tok);
-
     return res;
 }

diff --git a/fish_tests.cpp b/fish_tests.cpp
index a2ac00c47..d442ebaac 100644
--- a/fish_tests.cpp
+++ b/fish_tests.cpp
@@ -317,8 +317,6 @@ static void test_tok()
       This should crash if there is a bug. No reliable way to detect otherwise.
     */
     say(L"Test destruction of broken tokenizer");
-    tok_destroy(&t);
-
     {
         const wchar_t *str = L"string <redirection  2>&1 'nested \"quoted\" '(string containing subshells ){and,brackets}$as[$well (as variable arrays)] not_a_redirect^ ^ ^^is_a_redirect";

diff --git a/highlight.cpp b/highlight.cpp
index 666f6dbaa..8976b253a 100644
--- a/highlight.cpp
+++ b/highlight.cpp
@@ -785,7 +785,6 @@ static bool autosuggest_parse_command(const wcstring &str, wcstring *out_command
             }
         }
     }
-    tok_destroy(&tok);

     /* Remember our command if we have one */
     if (had_cmd)
@@ -1301,7 +1300,6 @@ static void tokenize(const wchar_t * const buff, std::vector<int> &color, const
             }
         }
     }
-    tok_destroy(&tok);
 }


diff --git a/history.cpp b/history.cpp
index 0a87e8b9e..88b689883 100644
--- a/history.cpp
+++ b/history.cpp
@@ -1432,7 +1432,6 @@ void history_t::add_with_file_detection(const wcstring &str)
             }
         }
     }
-    tok_destroy(&tokenizer);

     if (! potential_paths.empty())
     {
diff --git a/parse_util.cpp b/parse_util.cpp
index 10072ec86..11abd912d 100644
--- a/parse_util.cpp
+++ b/parse_util.cpp
@@ -404,9 +404,6 @@ static void job_or_process_extent(const wchar_t *buff,
     }

     free(buffcpy);
-
-    tok_destroy(&tok);
-
 }

 void parse_util_process_extent(const wchar_t *buff,
@@ -517,8 +514,6 @@ void parse_util_token_extent(const wchar_t *buff,

     free(buffcpy);

-    tok_destroy(&tok);
-
     if (tok_begin)
     {
         *tok_begin = a;
@@ -721,8 +716,6 @@ void parse_util_get_parameter_info(const wcstring &cmd, const size_t pos, wchar_
         prev_pos = tok_get_pos(&tok);
     }

-    tok_destroy(&tok);
-
     wchar_t *cmd_tmp = wcsdup(cmd.c_str());
     cmd_tmp[pos]=0;
     size_t cmdlen = wcslen(cmd_tmp);
diff --git a/parser.cpp b/parser.cpp
index 8a1d29846..0f8950f05 100644
--- a/parser.cpp
+++ b/parser.cpp
@@ -597,8 +597,6 @@ static const wchar_t *parser_find_end(const wchar_t * buff)
         }
     }

-
-    tok_destroy(&tok);

     if (!count && !error)
     {
@@ -872,8 +870,6 @@ int parser_t::eval_args(const wchar_t *line, std::vector<completion_t> &args)
     if (show_errors)
         this->print_errors_stderr();

-    tok_destroy(&tok);
-
     current_tokenizer=previous_tokenizer;
     current_tokenizer_pos = previous_pos;

@@ -2234,8 +2230,6 @@ int parser_t::parse_job(process_t *p,
                 }
             }
         }
-
-        tok_destroy(&subtok);
     }

     if (make_sub_block)
@@ -2721,7 +2715,6 @@ int parser_t::eval(const wcstring &cmdStr, const io_chain_t &io, enum block_type

         this->print_errors_stderr();

-    tok_destroy(current_tokenizer);
     delete current_tokenizer;

     while (forbidden_function.size() > forbid_count)
@@ -2960,8 +2953,6 @@ int parser_t::test_args(const wchar_t * buff, wcstring *out, const wchar_t *pre
         }
     }

-    tok_destroy(&tok);
-
     current_tokenizer = previous_tokenizer;
     current_tokenizer_pos = previous_pos;

@@ -3763,8 +3754,6 @@ int parser_t::test(const wchar_t * buff,
       Cleanup
     */

-    tok_destroy(&tok);
-
     current_tokenizer=previous_tokenizer;
     current_tokenizer_pos = previous_pos;

diff --git a/reader.cpp b/reader.cpp
index df39f3a29..a925527f8 100644
--- a/reader.cpp
+++ b/reader.cpp
@@ -1921,8 +1921,6 @@ static void handle_token_history(int forward, int reset)
                 }
             }
         }
-
-        tok_destroy(&tok);
     }

     if (str)
diff --git a/tokenizer.cpp b/tokenizer.cpp
index e555ba2b6..378fed1dd 100644
--- a/tokenizer.cpp
+++ b/tokenizer.cpp
@@ -110,11 +110,6 @@ tokenizer_t::tokenizer_t(const wchar_t *b, tok_flags_t flags) : buff(NULL), orig
     tok_next(this);
 }

-void tok_destroy(tokenizer_t *tok)
-{
-    CHECK(tok,);
-}
-
 int tok_last_type(tokenizer_t *tok)
 {
     CHECK(tok, TOK_ERROR);
@@ -664,7 +659,6 @@ wchar_t *tok_first(const wchar_t *str)
             break;
     }

-    tok_destroy(&t);
     return res;
 }

@@ -722,8 +716,6 @@ int main(int argc, char **argv)
                 break;
             }
         }
-        tok_destroy(&tok);
-
     }
 }

diff --git a/tokenizer.h b/tokenizer.h
index 7c2b71dca..0ff7f10ef 100644
--- a/tokenizer.h
+++ b/tokenizer.h
@@ -145,12 +145,6 @@ int tok_has_next(tokenizer_t *tok);
 */
 int tok_get_pos(tokenizer_t *tok);

-/**
-   Destroy the tokenizer and free asociated memory
-*/
-void tok_destroy(tokenizer_t *tok);
-
-
 /**
    Returns the original string to tokenizer
 */
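
Note: the removal is safe because tok_destroy had already become a no-op (its body was just
CHECK(tok,);) once tokenizer_t turned into a C++ class, so call sites simply let a
stack-allocated tokenizer go out of scope. A minimal sketch of the post-patch usage
pattern, for illustration only: walk_tokens is a hypothetical helper, and passing 0 as the
flags argument is assumed to mean "no flags"; tok_has_next, tok_last_type, tok_get_pos,
and tok_next are the functions visible in this patch.

    #include "tokenizer.h"

    // Hypothetical example: iterate over the tokens of a command line.
    static void walk_tokens(const wchar_t *cmd)
    {
        tokenizer_t tok(cmd, 0);            // the constructor already advances to the first token
        while (tok_has_next(&tok))
        {
            int type = tok_last_type(&tok); // type of the current token
            int pos = tok_get_pos(&tok);    // its offset in the source string
            (void)type;
            (void)pos;
            tok_next(&tok);                 // move on to the next token
        }
        // No tok_destroy(&tok) here: any cleanup runs when tok goes out of scope.
    }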