Mirror of https://github.com/fish-shell/fish-shell.git, synced 2024-11-22 11:22:52 +08:00
ast: Stop dropping down to wchar_t needlessly
Very slight performance increase (1% when parsing *all .fish scripts in fish-shell*), but this removes a useless variable and some .c_str() calls. Theoretically it should also remove some wcslen() calls, though those appear to be optimized out anyway.
This commit is contained in:
parent
389b75fe42
commit
66566c1650
16
src/ast.cpp
16
src/ast.cpp
|
@ -24,8 +24,8 @@ static tok_flags_t tokenizer_flags_from_parse_flags(parse_tree_flags_t flags) {
|
|||
}
|
||||
|
||||
// Given an expanded string, returns any keyword it matches.
|
||||
static parse_keyword_t keyword_with_name(const wchar_t *name) {
|
||||
return str_to_enum(name, keyword_enum_map, keyword_enum_map_len);
|
||||
static parse_keyword_t keyword_with_name(const wcstring &name) {
|
||||
return str_to_enum(name.c_str(), keyword_enum_map, keyword_enum_map_len);
|
||||
}
|
||||
|
||||
static bool is_keyword_char(wchar_t c) {
|
||||
|
@ -40,16 +40,14 @@ static parse_keyword_t keyword_for_token(token_type_t tok, const wcstring &token
|
|||
return parse_keyword_t::none;
|
||||
}
|
||||
|
||||
// If tok_txt is clean (which most are), we can compare it directly. Otherwise we have to expand
|
||||
// If token is clean (which most are), we can compare it directly. Otherwise we have to expand
|
||||
// it. We only expand quotes, and we don't want to do expensive expansions like tilde
|
||||
// expansions. So we do our own "cleanliness" check; if we find a character not in our allowed
|
||||
// set we know it's not a keyword, and if we never find a quote we don't have to expand! Note
|
||||
// that this lowercase set could be shrunk to be just the characters that are in keywords.
|
||||
parse_keyword_t result = parse_keyword_t::none;
|
||||
bool needs_expand = false, all_chars_valid = true;
|
||||
const wchar_t *tok_txt = token.c_str();
|
||||
for (size_t i = 0; tok_txt[i] != L'\0'; i++) {
|
||||
wchar_t c = tok_txt[i];
|
||||
for (wchar_t c : token) {
|
||||
if (!is_keyword_char(c)) {
|
||||
all_chars_valid = false;
|
||||
break;
|
||||
|
@ -61,11 +59,11 @@ static parse_keyword_t keyword_for_token(token_type_t tok, const wcstring &token
|
|||
if (all_chars_valid) {
|
||||
// Expand if necessary.
|
||||
if (!needs_expand) {
|
||||
result = keyword_with_name(tok_txt);
|
||||
result = keyword_with_name(token);
|
||||
} else {
|
||||
wcstring storage;
|
||||
if (unescape_string(tok_txt, &storage, 0)) {
|
||||
result = keyword_with_name(storage.c_str());
|
||||
if (unescape_string(token, &storage, 0)) {
|
||||
result = keyword_with_name(storage);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue
Block a user