Make parse_keyword_t an enum class

This commit is contained in:
ridiculousfish 2020-06-09 15:13:02 -07:00
parent b4351c5927
commit 19293ec2d6
7 changed files with 130 additions and 120 deletions

View File

@@ -255,7 +255,8 @@ void prettifier_t::prettify_node(const parse_node_tree_t &tree, node_offset_t no
if (last_was_semicolon) { if (last_was_semicolon) {
// We keep the semicolon for `; and` and `; or`, // We keep the semicolon for `; and` and `; or`,
// others we turn into newlines. // others we turn into newlines.
if (node.keyword != parse_keyword_and && node.keyword != parse_keyword_or) { if (node.keyword != parse_keyword_t::kw_and &&
node.keyword != parse_keyword_t::kw_or) {
append_newline(); append_newline();
} else { } else {
output.push_back(L';'); output.push_back(L';');

View File

@@ -100,47 +100,49 @@ const enum_map<parse_token_type_t> token_enum_map[] = {
// array below. // array below.
// //
// IMPORTANT: These enums must start at zero. // IMPORTANT: These enums must start at zero.
enum parse_keyword_t : uint8_t { enum class parse_keyword_t : uint8_t {
parse_keyword_and, // 'none' is not a keyword, it is a sentinel indicating nothing.
parse_keyword_begin, none,
parse_keyword_builtin,
parse_keyword_case, kw_and,
parse_keyword_command, kw_begin,
parse_keyword_else, kw_builtin,
parse_keyword_end, kw_case,
parse_keyword_exclam, kw_command,
parse_keyword_exec, kw_else,
parse_keyword_for, kw_end,
parse_keyword_function, kw_exclam,
parse_keyword_if, kw_exec,
parse_keyword_in, kw_for,
parse_keyword_none, kw_function,
parse_keyword_not, kw_if,
parse_keyword_or, kw_in,
parse_keyword_switch, kw_not,
parse_keyword_time, kw_or,
parse_keyword_while, kw_switch,
kw_time,
kw_while,
}; };
const enum_map<parse_keyword_t> keyword_enum_map[] = {{parse_keyword_exclam, L"!"}, const enum_map<parse_keyword_t> keyword_enum_map[] = {{parse_keyword_t::kw_exclam, L"!"},
{parse_keyword_and, L"and"}, {parse_keyword_t::kw_and, L"and"},
{parse_keyword_begin, L"begin"}, {parse_keyword_t::kw_begin, L"begin"},
{parse_keyword_builtin, L"builtin"}, {parse_keyword_t::kw_builtin, L"builtin"},
{parse_keyword_case, L"case"}, {parse_keyword_t::kw_case, L"case"},
{parse_keyword_command, L"command"}, {parse_keyword_t::kw_command, L"command"},
{parse_keyword_else, L"else"}, {parse_keyword_t::kw_else, L"else"},
{parse_keyword_end, L"end"}, {parse_keyword_t::kw_end, L"end"},
{parse_keyword_exec, L"exec"}, {parse_keyword_t::kw_exec, L"exec"},
{parse_keyword_for, L"for"}, {parse_keyword_t::kw_for, L"for"},
{parse_keyword_function, L"function"}, {parse_keyword_t::kw_function, L"function"},
{parse_keyword_if, L"if"}, {parse_keyword_t::kw_if, L"if"},
{parse_keyword_in, L"in"}, {parse_keyword_t::kw_in, L"in"},
{parse_keyword_not, L"not"}, {parse_keyword_t::kw_not, L"not"},
{parse_keyword_or, L"or"}, {parse_keyword_t::kw_or, L"or"},
{parse_keyword_switch, L"switch"}, {parse_keyword_t::kw_switch, L"switch"},
{parse_keyword_time, L"time"}, {parse_keyword_t::kw_time, L"time"},
{parse_keyword_while, L"while"}, {parse_keyword_t::kw_while, L"while"},
{parse_keyword_none, nullptr}}; {parse_keyword_t::none, nullptr}};
#define keyword_enum_map_len (sizeof keyword_enum_map / sizeof *keyword_enum_map) #define keyword_enum_map_len (sizeof keyword_enum_map / sizeof *keyword_enum_map)
// Node tag values. // Node tag values.

View File

@@ -46,7 +46,7 @@ struct keyword {
static constexpr parse_token_type_t token = parse_token_type_string; static constexpr parse_token_type_t token = parse_token_type_string;
static constexpr production_element_t element() { static constexpr production_element_t element() {
// Convert a parse_keyword_t enum to a production_element_t enum. // Convert a parse_keyword_t enum to a production_element_t enum.
return Keyword + LAST_TOKEN_OR_SYMBOL + 1; return static_cast<uint32_t>(Keyword) + LAST_TOKEN_OR_SYMBOL + 1;
} }
}; };
@@ -208,8 +208,8 @@ DEF_ALT(job_list) {
// Job decorators are 'and' and 'or'. These apply to the whole job. // Job decorators are 'and' and 'or'. These apply to the whole job.
DEF_ALT(job_decorator) { DEF_ALT(job_decorator) {
using ands = single<keyword<parse_keyword_and>>; using ands = single<keyword<parse_keyword_t::kw_and>>;
using ors = single<keyword<parse_keyword_or>>; using ors = single<keyword<parse_keyword_t::kw_or>>;
using empty = grammar::empty; using empty = grammar::empty;
ALT_BODY(job_decorator, ands, ors, empty); ALT_BODY(job_decorator, ands, ors, empty);
}; };
@@ -227,7 +227,7 @@ DEF_ALT(job_conjunction_continuation) {
/// The time builtin. /// The time builtin.
DEF_ALT(optional_time) { DEF_ALT(optional_time) {
using empty = grammar::empty; using empty = grammar::empty;
using time = single<keyword<parse_keyword_time>>; using time = single<keyword<parse_keyword_t::kw_time>>;
ALT_BODY(optional_time, empty, time); ALT_BODY(optional_time, empty, time);
}; };
@@ -271,12 +271,12 @@ produces_sequence<if_clause, else_clause, end_command, arguments_or_redirections
BODY(if_statement)}; BODY(if_statement)};
DEF(if_clause) DEF(if_clause)
produces_sequence<keyword<parse_keyword_if>, job_conjunction, tok_end, andor_job_list, job_list>{ produces_sequence<keyword<parse_keyword_t::kw_if>, job_conjunction, tok_end, andor_job_list,
BODY(if_clause)}; job_list>{BODY(if_clause)};
DEF_ALT(else_clause) { DEF_ALT(else_clause) {
using empty = grammar::empty; using empty = grammar::empty;
using else_cont = seq<keyword<parse_keyword_else>, else_continuation>; using else_cont = seq<keyword<parse_keyword_t::kw_else>, else_continuation>;
ALT_BODY(else_clause, empty, else_cont); ALT_BODY(else_clause, empty, else_cont);
}; };
@@ -287,8 +287,8 @@ DEF_ALT(else_continuation) {
}; };
DEF(switch_statement) DEF(switch_statement)
produces_sequence<keyword<parse_keyword_switch>, argument, tok_end, case_item_list, end_command, produces_sequence<keyword<parse_keyword_t::kw_switch>, argument, tok_end, case_item_list,
arguments_or_redirections_list>{BODY(switch_statement)}; end_command, arguments_or_redirections_list>{BODY(switch_statement)};
DEF_ALT(case_item_list) { DEF_ALT(case_item_list) {
using empty = grammar::empty; using empty = grammar::empty;
@@ -298,7 +298,8 @@ DEF_ALT(case_item_list) {
}; };
DEF(case_item) DEF(case_item)
produces_sequence<keyword<parse_keyword_case>, argument_list, tok_end, job_list>{BODY(case_item)}; produces_sequence<keyword<parse_keyword_t::kw_case>, argument_list, tok_end, job_list>{
BODY(case_item)};
DEF(block_statement) DEF(block_statement)
produces_sequence<block_header, job_list, end_command, arguments_or_redirections_list>{ produces_sequence<block_header, job_list, end_command, arguments_or_redirections_list>{
@@ -313,24 +314,25 @@ DEF_ALT(block_header) {
}; };
DEF(for_header) DEF(for_header)
produces_sequence<keyword<parse_keyword_for>, tok_string, keyword<parse_keyword_in>, argument_list, produces_sequence<keyword<parse_keyword_t::kw_for>, tok_string, keyword<parse_keyword_t::kw_in>,
tok_end>{BODY(for_header)}; argument_list, tok_end>{BODY(for_header)};
DEF(while_header) DEF(while_header)
produces_sequence<keyword<parse_keyword_while>, job_conjunction, tok_end, andor_job_list>{ produces_sequence<keyword<parse_keyword_t::kw_while>, job_conjunction, tok_end, andor_job_list>{
BODY(while_header)}; BODY(while_header)};
DEF(begin_header) produces_single<keyword<parse_keyword_begin>>{BODY(begin_header)}; DEF(begin_header) produces_single<keyword<parse_keyword_t::kw_begin>>{BODY(begin_header)};
// Functions take arguments, and require at least one (the name). No redirections allowed. // Functions take arguments, and require at least one (the name). No redirections allowed.
DEF(function_header) DEF(function_header)
produces_sequence<keyword<parse_keyword_function>, argument, argument_list, tok_end>{ produces_sequence<keyword<parse_keyword_t::kw_function>, argument, argument_list, tok_end>{
BODY(function_header)}; BODY(function_header)};
DEF_ALT(not_statement) { DEF_ALT(not_statement) {
using nots = seq<keyword<parse_keyword_not>, variable_assignments, optional_time, statement>; using nots =
seq<keyword<parse_keyword_t::kw_not>, variable_assignments, optional_time, statement>;
using exclams = using exclams =
seq<keyword<parse_keyword_exclam>, variable_assignments, optional_time, statement>; seq<keyword<parse_keyword_t::kw_exclam>, variable_assignments, optional_time, statement>;
ALT_BODY(not_statement, nots, exclams); ALT_BODY(not_statement, nots, exclams);
}; };
@@ -347,9 +349,9 @@ DEF_ALT(andor_job_list) {
// "builtin" or "command" or "exec" // "builtin" or "command" or "exec"
DEF_ALT(decorated_statement) { DEF_ALT(decorated_statement) {
using plains = single<plain_statement>; using plains = single<plain_statement>;
using cmds = seq<keyword<parse_keyword_command>, plain_statement>; using cmds = seq<keyword<parse_keyword_t::kw_command>, plain_statement>;
using builtins = seq<keyword<parse_keyword_builtin>, plain_statement>; using builtins = seq<keyword<parse_keyword_t::kw_builtin>, plain_statement>;
using execs = seq<keyword<parse_keyword_exec>, plain_statement>; using execs = seq<keyword<parse_keyword_t::kw_exec>, plain_statement>;
ALT_BODY(decorated_statement, plains, cmds, builtins, execs); ALT_BODY(decorated_statement, plains, cmds, builtins, execs);
}; };
@@ -378,7 +380,7 @@ DEF_ALT(optional_background) {
ALT_BODY(optional_background, empty, background); ALT_BODY(optional_background, empty, background);
}; };
DEF(end_command) produces_single<keyword<parse_keyword_end>>{BODY(end_command)}; DEF(end_command) produces_single<keyword<parse_keyword_t::kw_end>>{BODY(end_command)};
// Note optional_newlines only allows newline-style tok_end, not semicolons. // Note optional_newlines only allows newline-style tok_end, not semicolons.
DEF_ALT(optional_newlines) { DEF_ALT(optional_newlines) {

View File

@@ -38,9 +38,9 @@ RESOLVE(job_list) {
case parse_token_type_string: { case parse_token_type_string: {
// Some keywords are special. // Some keywords are special.
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_end: case parse_keyword_t::kw_end:
case parse_keyword_else: case parse_keyword_t::kw_else:
case parse_keyword_case: { case parse_keyword_t::kw_case: {
return production_for<empty>(); // end this job list return production_for<empty>(); // end this job list
} }
default: { default: {
@@ -74,11 +74,11 @@ RESOLVE(job_decorator) {
} }
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_and: { case parse_keyword_t::kw_and: {
*out_tag = parse_job_decoration_and; *out_tag = parse_job_decoration_and;
return production_for<ands>(); return production_for<ands>();
} }
case parse_keyword_or: { case parse_keyword_t::kw_or: {
*out_tag = parse_job_decoration_or; *out_tag = parse_job_decoration_or;
return production_for<ors>(); return production_for<ors>();
} }
@@ -132,16 +132,16 @@ RESOLVE(statement) {
if (token1.type == parse_token_type_string) { if (token1.type == parse_token_type_string) {
// If we are a function, then look for help arguments. Otherwise, if the next token looks // If we are a function, then look for help arguments. Otherwise, if the next token looks
// like an option (starts with a dash), then parse it as a decorated statement. // like an option (starts with a dash), then parse it as a decorated statement.
if (token1.keyword == parse_keyword_function && token2.is_help_argument) { if (token1.keyword == parse_keyword_t::kw_function && token2.is_help_argument) {
return production_for<decorated>(); return production_for<decorated>();
} else if (token1.keyword != parse_keyword_function && token2.has_dash_prefix) { } else if (token1.keyword != parse_keyword_t::kw_function && token2.has_dash_prefix) {
return production_for<decorated>(); return production_for<decorated>();
} }
// Likewise if the next token doesn't look like an argument at all. This corresponds to e.g. // Likewise if the next token doesn't look like an argument at all. This corresponds to e.g.
// a "naked if". // a "naked if".
bool naked_invocation_invokes_help = bool naked_invocation_invokes_help = (token1.keyword != parse_keyword_t::kw_begin &&
(token1.keyword != parse_keyword_begin && token1.keyword != parse_keyword_end); token1.keyword != parse_keyword_t::kw_end);
if (naked_invocation_invokes_help && if (naked_invocation_invokes_help &&
(token2.type == parse_token_type_end || token2.type == parse_token_type_terminate)) { (token2.type == parse_token_type_end || token2.type == parse_token_type_terminate)) {
return production_for<decorated>(); return production_for<decorated>();
@@ -151,26 +151,26 @@ RESOLVE(statement) {
switch (token1.type) { switch (token1.type) {
case parse_token_type_string: { case parse_token_type_string: {
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_not: case parse_keyword_t::kw_not:
case parse_keyword_exclam: { case parse_keyword_t::kw_exclam: {
return production_for<nots>(); return production_for<nots>();
} }
case parse_keyword_for: case parse_keyword_t::kw_for:
case parse_keyword_while: case parse_keyword_t::kw_while:
case parse_keyword_function: case parse_keyword_t::kw_function:
case parse_keyword_begin: { case parse_keyword_t::kw_begin: {
return production_for<block>(); return production_for<block>();
} }
case parse_keyword_if: { case parse_keyword_t::kw_if: {
return production_for<ifs>(); return production_for<ifs>();
} }
case parse_keyword_else: { case parse_keyword_t::kw_else: {
return NO_PRODUCTION; return NO_PRODUCTION;
} }
case parse_keyword_switch: { case parse_keyword_t::kw_switch: {
return production_for<switchs>(); return production_for<switchs>();
} }
case parse_keyword_end: { case parse_keyword_t::kw_end: {
return NO_PRODUCTION; return NO_PRODUCTION;
} }
// All other keywords fall through to decorated statement. // All other keywords fall through to decorated statement.
@@ -196,7 +196,7 @@ RESOLVE(else_clause) {
UNUSED(out_tag); UNUSED(out_tag);
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_else: { case parse_keyword_t::kw_else: {
return production_for<else_cont>(); return production_for<else_cont>();
} }
default: { default: {
@@ -210,7 +210,7 @@ RESOLVE(else_continuation) {
UNUSED(out_tag); UNUSED(out_tag);
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_if: { case parse_keyword_t::kw_if: {
return production_for<else_if>(); return production_for<else_if>();
} }
default: { default: {
@@ -223,7 +223,7 @@ RESOLVE(case_item_list) {
UNUSED(token2); UNUSED(token2);
UNUSED(out_tag); UNUSED(out_tag);
if (token1.keyword == parse_keyword_case) if (token1.keyword == parse_keyword_t::kw_case)
return production_for<case_items>(); return production_for<case_items>();
else if (token1.type == parse_token_type_end) else if (token1.type == parse_token_type_end)
return production_for<blank_line>(); return production_for<blank_line>();
@@ -235,9 +235,9 @@ RESOLVE(not_statement) {
UNUSED(token2); UNUSED(token2);
UNUSED(out_tag); UNUSED(out_tag);
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_not: case parse_keyword_t::kw_not:
return production_for<nots>(); return production_for<nots>();
case parse_keyword_exclam: case parse_keyword_t::kw_exclam:
return production_for<exclams>(); return production_for<exclams>();
default: default:
return NO_PRODUCTION; return NO_PRODUCTION;
@@ -249,7 +249,8 @@ RESOLVE(andor_job_list) {
if (token1.type == parse_token_type_end) { if (token1.type == parse_token_type_end) {
return production_for<empty_line>(); return production_for<empty_line>();
} else if (token1.keyword == parse_keyword_and || token1.keyword == parse_keyword_or) { } else if (token1.keyword == parse_keyword_t::kw_and ||
token1.keyword == parse_keyword_t::kw_or) {
// Check that the argument to and/or is a string that's not help. Otherwise it's either 'and // Check that the argument to and/or is a string that's not help. Otherwise it's either 'and
// --help' or a naked 'and', and not part of this list. // --help' or a naked 'and', and not part of this list.
if (token2.type == parse_token_type_string && !token2.is_help_argument) { if (token2.type == parse_token_type_string && !token2.is_help_argument) {
@@ -295,16 +296,16 @@ RESOLVE(block_header) {
UNUSED(out_tag); UNUSED(out_tag);
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_for: { case parse_keyword_t::kw_for: {
return production_for<forh>(); return production_for<forh>();
} }
case parse_keyword_while: { case parse_keyword_t::kw_while: {
return production_for<whileh>(); return production_for<whileh>();
} }
case parse_keyword_function: { case parse_keyword_t::kw_function: {
return production_for<funch>(); return production_for<funch>();
} }
case parse_keyword_begin: { case parse_keyword_t::kw_begin: {
return production_for<beginh>(); return production_for<beginh>();
} }
default: { default: {
@@ -327,7 +328,7 @@ RESOLVE(decorated_statement) {
// and/or are typically parsed in job_conjunction at the beginning of a job // and/or are typically parsed in job_conjunction at the beginning of a job
// However they may be reached here through e.g. true && and false. // However they may be reached here through e.g. true && and false.
// Refuse to parse them as a command except for --help. See #6089. // Refuse to parse them as a command except for --help. See #6089.
if ((token1.keyword == parse_keyword_and || token1.keyword == parse_keyword_or) && if ((token1.keyword == parse_keyword_t::kw_and || token1.keyword == parse_keyword_t::kw_or) &&
!token2.is_help_argument) { !token2.is_help_argument) {
return NO_PRODUCTION; return NO_PRODUCTION;
} }
@@ -340,15 +341,15 @@ RESOLVE(decorated_statement) {
} }
switch (token1.keyword) { switch (token1.keyword) {
case parse_keyword_command: { case parse_keyword_t::kw_command: {
*out_tag = parse_statement_decoration_command; *out_tag = parse_statement_decoration_command;
return production_for<cmds>(); return production_for<cmds>();
} }
case parse_keyword_builtin: { case parse_keyword_t::kw_builtin: {
*out_tag = parse_statement_decoration_builtin; *out_tag = parse_statement_decoration_builtin;
return production_for<builtins>(); return production_for<builtins>();
} }
case parse_keyword_exec: { case parse_keyword_t::kw_exec: {
*out_tag = parse_statement_decoration_exec; *out_tag = parse_statement_decoration_exec;
return production_for<execs>(); return production_for<execs>();
} }
@@ -396,7 +397,7 @@ RESOLVE(optional_background) {
} }
RESOLVE(optional_time) { RESOLVE(optional_time) {
if (token1.keyword == parse_keyword_time && !token2.is_help_argument) { if (token1.keyword == parse_keyword_t::kw_time && !token2.is_help_argument) {
*out_tag = parse_optional_time_time; *out_tag = parse_optional_time_time;
return production_for<time>(); return production_for<time>();
} }

View File

@@ -30,7 +30,7 @@ inline parse_keyword_t production_element_keyword(production_element_t elem) {
// First keyword is LAST_TOKEN_OR_SYMBOL + 1. // First keyword is LAST_TOKEN_OR_SYMBOL + 1.
return static_cast<parse_keyword_t>(elem - LAST_TOKEN_OR_SYMBOL - 1); return static_cast<parse_keyword_t>(elem - LAST_TOKEN_OR_SYMBOL - 1);
} else { } else {
return parse_keyword_none; return parse_keyword_t::none;
} }
} }

View File

@@ -169,8 +169,8 @@ const wchar_t *keyword_description(parse_keyword_t type) {
} }
static wcstring token_type_user_presentable_description( static wcstring token_type_user_presentable_description(
parse_token_type_t type, parse_keyword_t keyword = parse_keyword_none) { parse_token_type_t type, parse_keyword_t keyword = parse_keyword_t::none) {
if (keyword != parse_keyword_none) { if (keyword != parse_keyword_t::none) {
return format_string(L"keyword '%ls'", keyword_description(keyword)); return format_string(L"keyword '%ls'", keyword_description(keyword));
} }
@@ -241,7 +241,7 @@ wcstring parse_node_t::describe() const {
/// Returns a string description of the given parse token. /// Returns a string description of the given parse token.
wcstring parse_token_t::describe() const { wcstring parse_token_t::describe() const {
wcstring result = token_type_description(type); wcstring result = token_type_description(type);
if (keyword != parse_keyword_none) { if (keyword != parse_keyword_t::none) {
append_format(result, L" <%ls>", keyword_description(keyword)); append_format(result, L" <%ls>", keyword_description(keyword));
} }
return result; return result;
@@ -366,14 +366,14 @@ struct parse_stack_element_t {
node_offset_t node_idx; node_offset_t node_idx;
explicit parse_stack_element_t(parse_token_type_t t, node_offset_t idx) explicit parse_stack_element_t(parse_token_type_t t, node_offset_t idx)
: type(t), keyword(parse_keyword_none), node_idx(idx) {} : type(t), keyword(parse_keyword_t::none), node_idx(idx) {}
explicit parse_stack_element_t(production_element_t e, node_offset_t idx) explicit parse_stack_element_t(production_element_t e, node_offset_t idx)
: type(production_element_type(e)), keyword(production_element_keyword(e)), node_idx(idx) {} : type(production_element_type(e)), keyword(production_element_keyword(e)), node_idx(idx) {}
wcstring describe() const { wcstring describe() const {
wcstring result = token_type_description(type); wcstring result = token_type_description(type);
if (keyword != parse_keyword_none) { if (keyword != parse_keyword_t::none) {
append_format(result, L" <%ls>", keyword_description(keyword)); append_format(result, L" <%ls>", keyword_description(keyword));
} }
return result; return result;
@@ -668,16 +668,16 @@ void parse_ll_t::parse_error_unbalancing_token(parse_token_t token) {
this->fatal_errored = true; this->fatal_errored = true;
if (this->should_generate_error_messages) { if (this->should_generate_error_messages) {
switch (token.keyword) { switch (token.keyword) {
case parse_keyword_end: { case parse_keyword_t::kw_end: {
this->parse_error(token, parse_error_unbalancing_end, L"'end' outside of a block"); this->parse_error(token, parse_error_unbalancing_end, L"'end' outside of a block");
break; break;
} }
case parse_keyword_else: { case parse_keyword_t::kw_else: {
this->parse_error(token, parse_error_unbalancing_else, this->parse_error(token, parse_error_unbalancing_else,
L"'else' builtin not inside of if block"); L"'else' builtin not inside of if block");
break; break;
} }
case parse_keyword_case: { case parse_keyword_t::kw_case: {
this->parse_error(token, parse_error_unbalancing_case, this->parse_error(token, parse_error_unbalancing_case,
L"'case' builtin not inside of switch block"); L"'case' builtin not inside of switch block");
break; break;
@@ -823,7 +823,7 @@ bool parse_ll_t::top_node_handle_terminal_types(const parse_token_t &token) {
if (stack_top.type == parse_token_type_string) { if (stack_top.type == parse_token_type_string) {
// We matched if the keywords match, or no keyword was required. // We matched if the keywords match, or no keyword was required.
matched = matched =
(stack_top.keyword == parse_keyword_none || stack_top.keyword == token.keyword); (stack_top.keyword == parse_keyword_t::none || stack_top.keyword == token.keyword);
} else { } else {
// For other types, we only require that the types match. // For other types, we only require that the types match.
matched = true; matched = true;
@@ -843,19 +843,20 @@ bool parse_ll_t::top_node_handle_terminal_types(const parse_token_t &token) {
// Failure // Failure
if (stack_top.type == parse_token_type_string && token.type == parse_token_type_string) { if (stack_top.type == parse_token_type_string && token.type == parse_token_type_string) {
// Keyword failure. We should unify this with the 'matched' computation above. // Keyword failure. We should unify this with the 'matched' computation above.
assert(stack_top.keyword != parse_keyword_none && stack_top.keyword != token.keyword); assert(stack_top.keyword != parse_keyword_t::none &&
stack_top.keyword != token.keyword);
// Check to see which keyword we got which was considered wrong. // Check to see which keyword we got which was considered wrong.
switch (token.keyword) { switch (token.keyword) {
// Some keywords are only valid in certain contexts. If this cascaded all the // Some keywords are only valid in certain contexts. If this cascaded all the
// way down through the outermost job_list, it was not in a valid context. // way down through the outermost job_list, it was not in a valid context.
case parse_keyword_case: case parse_keyword_t::kw_case:
case parse_keyword_end: case parse_keyword_t::kw_end:
case parse_keyword_else: { case parse_keyword_t::kw_else: {
this->parse_error_unbalancing_token(token); this->parse_error_unbalancing_token(token);
break; break;
} }
case parse_keyword_none: { case parse_keyword_t::none: {
// This is a random other string (not a keyword). // This is a random other string (not a keyword).
const wcstring expected = keyword_description(stack_top.keyword); const wcstring expected = keyword_description(stack_top.keyword);
this->parse_error(token, parse_error_generic, L"Expected keyword '%ls'", this->parse_error(token, parse_error_generic, L"Expected keyword '%ls'",
@@ -864,9 +865,9 @@ bool parse_ll_t::top_node_handle_terminal_types(const parse_token_t &token) {
} }
default: { default: {
// Got a real keyword we can report. // Got a real keyword we can report.
const wcstring actual = const wcstring actual = (token.keyword == parse_keyword_t::none
(token.keyword == parse_keyword_none ? token.describe() ? token.describe()
: keyword_description(token.keyword)); : keyword_description(token.keyword));
const wcstring expected = keyword_description(stack_top.keyword); const wcstring expected = keyword_description(stack_top.keyword);
this->parse_error(token, parse_error_generic, this->parse_error(token, parse_error_generic,
L"Expected keyword '%ls', instead got keyword '%ls'", L"Expected keyword '%ls', instead got keyword '%ls'",
@@ -874,7 +875,7 @@ bool parse_ll_t::top_node_handle_terminal_types(const parse_token_t &token) {
break; break;
} }
} }
} else if (stack_top.keyword == parse_keyword_end && } else if (stack_top.keyword == parse_keyword_t::kw_end &&
token.type == parse_token_type_terminate && token.type == parse_token_type_terminate &&
this->report_error_for_unclosed_block()) { this->report_error_for_unclosed_block()) {
// handled by report_error_for_unclosed_block // handled by report_error_for_unclosed_block
@@ -941,7 +942,8 @@ void parse_ll_t::accept_tokens(parse_token_t token1, parse_token_t token2) {
tnode_t<grammar::variable_assignments> variable_assignments; tnode_t<grammar::variable_assignments> variable_assignments;
if (const parse_node_t *parent = nodes.get_parent(node)) { if (const parse_node_t *parent = nodes.get_parent(node)) {
if (parent->type == symbol_statement && if (parent->type == symbol_statement &&
(token1.keyword == parse_keyword_and || token1.keyword == parse_keyword_or)) { (token1.keyword == parse_keyword_t::kw_and ||
token1.keyword == parse_keyword_t::kw_or)) {
if (const parse_node_t *grandparent = nodes.get_parent(*parent)) { if (const parse_node_t *grandparent = nodes.get_parent(*parent)) {
if (grandparent->type == symbol_job_continuation) { if (grandparent->type == symbol_job_continuation) {
parse_error(token1, parse_error_andor_in_pipeline, L" " parse_error(token1, parse_error_andor_in_pipeline, L" "
@@ -1026,11 +1028,11 @@ static bool is_keyword_char(wchar_t c) {
c == L'\'' || c == L'"' || c == L'\\' || c == '\n' || c == L'!'; c == L'\'' || c == L'"' || c == L'\\' || c == '\n' || c == L'!';
} }
/// Given a token, returns the keyword it matches, or parse_keyword_none. /// Given a token, returns the keyword it matches, or parse_keyword_t::none.
static parse_keyword_t keyword_for_token(token_type_t tok, const wcstring &token) { static parse_keyword_t keyword_for_token(token_type_t tok, const wcstring &token) {
/* Only strings can be keywords */ /* Only strings can be keywords */
if (tok != token_type_t::string) { if (tok != token_type_t::string) {
return parse_keyword_none; return parse_keyword_t::none;
} }
// If tok_txt is clean (which most are), we can compare it directly. Otherwise we have to expand // If tok_txt is clean (which most are), we can compare it directly. Otherwise we have to expand
@@ -1038,7 +1040,7 @@ static parse_keyword_t keyword_for_token(token_type_t tok, const wcstring &token
// expansions. So we do our own "cleanliness" check; if we find a character not in our allowed // expansions. So we do our own "cleanliness" check; if we find a character not in our allowed
// set we know it's not a keyword, and if we never find a quote we don't have to expand! Note // set we know it's not a keyword, and if we never find a quote we don't have to expand! Note
// that this lowercase set could be shrunk to be just the characters that are in keywords. // that this lowercase set could be shrunk to be just the characters that are in keywords.
parse_keyword_t result = parse_keyword_none; parse_keyword_t result = parse_keyword_t::none;
bool needs_expand = false, all_chars_valid = true; bool needs_expand = false, all_chars_valid = true;
const wchar_t *tok_txt = token.c_str(); const wchar_t *tok_txt = token.c_str();
for (size_t i = 0; tok_txt[i] != L'\0'; i++) { for (size_t i = 0; tok_txt[i] != L'\0'; i++) {

View File

@@ -33,7 +33,9 @@ struct source_range_t {
/// A struct representing the token type that we use internally. /// A struct representing the token type that we use internally.
struct parse_token_t { struct parse_token_t {
enum parse_token_type_t type; // The type of the token as represented by the parser enum parse_token_type_t type; // The type of the token as represented by the parser
enum parse_keyword_t keyword { parse_keyword_none }; // Any keyword represented by this token enum parse_keyword_t keyword {
parse_keyword_t::none
}; // Any keyword represented by this token
bool has_dash_prefix{false}; // Hackish: whether the source contains a dash prefix bool has_dash_prefix{false}; // Hackish: whether the source contains a dash prefix
bool is_help_argument{false}; // Hackish: whether the source looks like '-h' or '--help' bool is_help_argument{false}; // Hackish: whether the source looks like '-h' or '--help'
bool is_newline{false}; // Hackish: if TOK_END, whether the source is a newline. bool is_newline{false}; // Hackish: if TOK_END, whether the source is a newline.
@@ -119,7 +121,7 @@ class parse_node_t {
child_start(0), child_start(0),
child_count(0), child_count(0),
type(ty), type(ty),
keyword(parse_keyword_none), keyword(parse_keyword_t::none),
flags(0), flags(0),
tag(0) {} tag(0) {}