Remove tok_destroy

This commit is contained in:
ridiculousfish 2012-11-21 22:14:28 -08:00
parent a3a7c48799
commit 7117c4a5ee
11 changed files with 0 additions and 44 deletions

View File

@ -177,7 +177,6 @@ static void write_part(const wchar_t *begin,
stdout_buffer.append(out); stdout_buffer.append(out);
free(buff); free(buff);
tok_destroy(&tok);
} }
else else
{ {

View File

@ -1893,8 +1893,6 @@ void complete(const wcstring &cmd, std::vector<completion_t> &comps, complete_ty
} }
tok_destroy(&tok);
/* /*
Get the string to complete Get the string to complete
*/ */

View File

@ -257,8 +257,6 @@ static int indent(wcstring &out, const wcstring &in, int flags)
} }
tok_destroy(&tok);
return res; return res;
} }

View File

@ -317,8 +317,6 @@ static void test_tok()
This should crash if there is a bug. No reliable way to detect otherwise. This should crash if there is a bug. No reliable way to detect otherwise.
*/ */
say(L"Test destruction of broken tokenizer"); say(L"Test destruction of broken tokenizer");
tok_destroy(&t);
{ {
const wchar_t *str = L"string <redirection 2>&1 'nested \"quoted\" '(string containing subshells ){and,brackets}$as[$well (as variable arrays)] not_a_redirect^ ^ ^^is_a_redirect"; const wchar_t *str = L"string <redirection 2>&1 'nested \"quoted\" '(string containing subshells ){and,brackets}$as[$well (as variable arrays)] not_a_redirect^ ^ ^^is_a_redirect";

View File

@ -785,7 +785,6 @@ static bool autosuggest_parse_command(const wcstring &str, wcstring *out_command
} }
} }
} }
tok_destroy(&tok);
/* Remember our command if we have one */ /* Remember our command if we have one */
if (had_cmd) if (had_cmd)
@ -1301,7 +1300,6 @@ static void tokenize(const wchar_t * const buff, std::vector<int> &color, const
} }
} }
} }
tok_destroy(&tok);
} }

View File

@ -1432,7 +1432,6 @@ void history_t::add_with_file_detection(const wcstring &str)
} }
} }
} }
tok_destroy(&tokenizer);
if (! potential_paths.empty()) if (! potential_paths.empty())
{ {

View File

@ -404,9 +404,6 @@ static void job_or_process_extent(const wchar_t *buff,
} }
free(buffcpy); free(buffcpy);
tok_destroy(&tok);
} }
void parse_util_process_extent(const wchar_t *buff, void parse_util_process_extent(const wchar_t *buff,
@ -517,8 +514,6 @@ void parse_util_token_extent(const wchar_t *buff,
free(buffcpy); free(buffcpy);
tok_destroy(&tok);
if (tok_begin) if (tok_begin)
{ {
*tok_begin = a; *tok_begin = a;
@ -721,8 +716,6 @@ void parse_util_get_parameter_info(const wcstring &cmd, const size_t pos, wchar_
prev_pos = tok_get_pos(&tok); prev_pos = tok_get_pos(&tok);
} }
tok_destroy(&tok);
wchar_t *cmd_tmp = wcsdup(cmd.c_str()); wchar_t *cmd_tmp = wcsdup(cmd.c_str());
cmd_tmp[pos]=0; cmd_tmp[pos]=0;
size_t cmdlen = wcslen(cmd_tmp); size_t cmdlen = wcslen(cmd_tmp);

View File

@ -597,8 +597,6 @@ static const wchar_t *parser_find_end(const wchar_t * buff)
} }
} }
tok_destroy(&tok);
if (!count && !error) if (!count && !error)
{ {
@ -872,8 +870,6 @@ int parser_t::eval_args(const wchar_t *line, std::vector<completion_t> &args)
if (show_errors) if (show_errors)
this->print_errors_stderr(); this->print_errors_stderr();
tok_destroy(&tok);
current_tokenizer=previous_tokenizer; current_tokenizer=previous_tokenizer;
current_tokenizer_pos = previous_pos; current_tokenizer_pos = previous_pos;
@ -2234,8 +2230,6 @@ int parser_t::parse_job(process_t *p,
} }
} }
} }
tok_destroy(&subtok);
} }
if (make_sub_block) if (make_sub_block)
@ -2721,7 +2715,6 @@ int parser_t::eval(const wcstring &cmdStr, const io_chain_t &io, enum block_type
this->print_errors_stderr(); this->print_errors_stderr();
tok_destroy(current_tokenizer);
delete current_tokenizer; delete current_tokenizer;
while (forbidden_function.size() > forbid_count) while (forbidden_function.size() > forbid_count)
@ -2960,8 +2953,6 @@ int parser_t::test_args(const wchar_t * buff, wcstring *out, const wchar_t *pre
} }
} }
tok_destroy(&tok);
current_tokenizer = previous_tokenizer; current_tokenizer = previous_tokenizer;
current_tokenizer_pos = previous_pos; current_tokenizer_pos = previous_pos;
@ -3763,8 +3754,6 @@ int parser_t::test(const wchar_t * buff,
Cleanup Cleanup
*/ */
tok_destroy(&tok);
current_tokenizer=previous_tokenizer; current_tokenizer=previous_tokenizer;
current_tokenizer_pos = previous_pos; current_tokenizer_pos = previous_pos;

View File

@ -1921,8 +1921,6 @@ static void handle_token_history(int forward, int reset)
} }
} }
} }
tok_destroy(&tok);
} }
if (str) if (str)

View File

@ -110,11 +110,6 @@ tokenizer_t::tokenizer_t(const wchar_t *b, tok_flags_t flags) : buff(NULL), orig
tok_next(this); tok_next(this);
} }
void tok_destroy(tokenizer_t *tok)
{
CHECK(tok,);
}
int tok_last_type(tokenizer_t *tok) int tok_last_type(tokenizer_t *tok)
{ {
CHECK(tok, TOK_ERROR); CHECK(tok, TOK_ERROR);
@ -664,7 +659,6 @@ wchar_t *tok_first(const wchar_t *str)
break; break;
} }
tok_destroy(&t);
return res; return res;
} }
@ -722,8 +716,6 @@ int main(int argc, char **argv)
break; break;
} }
} }
tok_destroy(&tok);
} }
} }

View File

@ -145,12 +145,6 @@ int tok_has_next(tokenizer_t *tok);
*/ */
int tok_get_pos(tokenizer_t *tok); int tok_get_pos(tokenizer_t *tok);
/**
Destroy the tokenizer and free associated memory
*/
void tok_destroy(tokenizer_t *tok);
/** /**
Returns the original string to tokenizer Returns the original string to tokenizer
*/ */