diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index ad57e1599..64194221f 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -30,10 +30,13 @@ Notable improvements and fixes
 Deprecations and removed features
 ---------------------------------
 
+- ``commandline --tokenize`` (short option ``-o``) has been deprecated in favor of ``commandline --tokens-expanded`` (short option ``-x``), which expands variables and other shell expressions, removing the need to use "eval" in custom completions (:issue:`10212`).
+
 Scripting improvements
 ----------------------
 - ``functions`` and ``type`` now show where a function was copied and where it originally was instead of saying ``Defined interactively``.
 - Stack trace now shows line numbers for copied functions.
+- New option ``commandline --tokens-raw`` prints a list of tokens without any unescaping (:issue:`10212`).
 
 Interactive improvements
 ------------------------
diff --git a/doc_src/cmds/commandline.rst b/doc_src/cmds/commandline.rst
index 12291d6b8..17db670b7 100644
--- a/doc_src/cmds/commandline.rst
+++ b/doc_src/cmds/commandline.rst
@@ -74,13 +74,20 @@ The following options change the way ``commandline`` prints the current commandl
 **-c** or **--cut-at-cursor**
     Only print selection up until the current cursor position.
-    If combined with ``--tokenize``, this will print up until the last completed token - excluding the token the cursor is in.
+    If combined with ``--tokens-expanded``, this will print up until the last completed token - excluding the token the cursor is in.
     This is typically what you would want for instance in completions.
-    To get both, use both ``commandline --cut-at-cursor --tokenize; commandline --cut-at-cursor --current-token``,
-    or ``commandline -co; commandline -ct`` for short.
+    To get both, use both ``commandline --cut-at-cursor --tokens-expanded; commandline --cut-at-cursor --current-token``,
+    or ``commandline -cx; commandline -ct`` for short.
 
-**-o** or **--tokenize**
-    Tokenize the selection and print one string-type token per line.
+**-x** or **--tokens-expanded**
+    Perform argument expansion on the selection and print one argument per line.
+    Command substitutions are not expanded but forwarded as-is.
+
+**--tokens-raw**
+    Print arguments in the selection as they appear on the command line, one per line.
+
+**-o** or **--tokenize**
+    Deprecated, do not use.
 
 If ``commandline`` is called during a call to complete a given string using ``complete -C STRING``, ``commandline`` will consider the specified string to be the current contents of the command line.
@@ -128,7 +135,7 @@
 The most common use for something like completions is
 
 ::
 
-    set -l tokens (commandline -opc)
+    set -l tokens (commandline -xpc)
 
 which gives the current *process* (what is being completed), tokenized into separate entries, up to but excluding the currently being completed token
diff --git a/share/completions/commandline.fish b/share/completions/commandline.fish
index 6789cc835..478ea127f 100644
--- a/share/completions/commandline.fish
+++ b/share/completions/commandline.fish
@@ -12,7 +12,8 @@ complete -c commandline -s b -l current-buffer -d "Select entire command line (d
 
 complete -c commandline -s c -l cut-at-cursor -d "Only return that part of the command line before the cursor"
 complete -c commandline -s f -l function -d "Inject readline functions to reader"
-complete -c commandline -s o -l tokenize -d "Print each token on a separate line"
+complete -c commandline -s x -l tokens-expanded -d "Print a list of expanded tokens"
+complete -c commandline -l tokens-raw -d "Print a list of raw tokens"
 
 complete -c commandline -s I -l input -d "Specify command to operate on"
 complete -c commandline -s C -l cursor -d "Set/get cursor position, not buffer contents"
diff --git a/src/builtins/commandline.rs b/src/builtins/commandline.rs
index a5011d12a..c96b3d2e7 100644
--- a/src/builtins/commandline.rs
+++ b/src/builtins/commandline.rs
@@ -1,7 +1,10 @@
 use super::prelude::*;
 use crate::common::{unescape_string, UnescapeFlags, UnescapeStringStyle};
+use crate::complete::Completion;
+use crate::expand::{expand_string, ExpandFlags, ExpandResultCode};
 use crate::input::input_function_get_code;
 use crate::input_common::{CharEvent, ReadlineCmd};
+use crate::operation_context::{no_cancel, OperationContext};
 use crate::parse_constants::ParserTestErrorBits;
 use crate::parse_util::{
     parse_util_detect_errors, parse_util_job_extent, parse_util_lineno, parse_util_process_extent,
@@ -11,9 +14,8 @@ use crate::proc::is_interactive_session;
 use crate::reader::{
     commandline_get_state, commandline_set_buffer, reader_handle_command, reader_queue_ch,
 };
-use crate::tokenizer::TokenType;
-use crate::tokenizer::Tokenizer;
 use crate::tokenizer::TOK_ACCEPT_UNFINISHED;
+use crate::tokenizer::{TokenType, Tokenizer};
 use crate::wchar::prelude::*;
 use crate::wcstringutil::join_strings;
 use crate::wgetopt::{wgetopter_t, wopt, woption, woption_argument_t};
@@ -37,6 +39,12 @@ enum AppendMode {
     Append,
 }
 
+enum TokenMode {
+    Expanded,
+    Raw,
+    Unescaped,
+}
+
 /// Replace/append/insert the selection with/at/after the specified string.
 ///
 /// \param begin beginning of selection
@@ -84,47 +92,85 @@ fn replace_part(
 /// \param begin start of selection
 /// \param end end of selection
 /// \param cut_at_cursor whether printing should stop at the surrent cursor position
-/// \param tokenize whether the string should be tokenized, printing one string token on every line
 /// and skipping non-string tokens
 /// \param buffer the original command line buffer
 /// \param cursor_pos the position of the cursor in the command line
 fn write_part(
+    parser: &Parser,
     range: Range<usize>,
     cut_at_cursor: bool,
-    tokenize: bool,
+    token_mode: Option<TokenMode>,
     buffer: &wstr,
     cursor_pos: usize,
     streams: &mut IoStreams,
 ) {
     let pos = cursor_pos - range.start;
 
-    if tokenize {
-        let mut out = WString::new();
-        let buff = &buffer[range];
-        let mut tok = Tokenizer::new(buff, TOK_ACCEPT_UNFINISHED);
-        while let Some(token) = tok.next() {
-            if cut_at_cursor && token.end() >= pos {
-                break;
-            }
-
-            if token.type_ == TokenType::string {
-                let tmp = tok.text_of(&token);
-                let unescaped =
-                    unescape_string(tmp, UnescapeStringStyle::Script(UnescapeFlags::INCOMPLETE))
-                        .unwrap();
-                out.push_utfstr(&unescaped);
-                out.push('\n');
-            }
-        }
-
-        streams.out.append(out);
-    } else {
+    let Some(token_mode) = token_mode else {
         if cut_at_cursor {
             streams.out.append(&buffer[range.start..range.start + pos]);
         } else {
             streams.out.append(&buffer[range]);
         }
         streams.out.push('\n');
+        return;
+    };
+
+    let buff = &buffer[range];
+    let mut tok = Tokenizer::new(buff, TOK_ACCEPT_UNFINISHED);
+    let mut args = vec![];
+    while let Some(token) = tok.next() {
+        if cut_at_cursor && token.end() >= pos {
+            break;
+        }
+        if token.type_ != TokenType::string {
+            continue;
+        }
+
+        let token_text = tok.text_of(&token);
+
+        match token_mode {
+            TokenMode::Expanded => {
+                const COMMANDLINE_TOKENS_MAX_EXPANSION: usize = 512;
+
+                match expand_string(
+                    token_text.to_owned(),
+                    &mut args,
+                    ExpandFlags::SKIP_CMDSUBST,
+                    &OperationContext::foreground(
+                        parser.shared(),
+                        Box::new(no_cancel),
+                        COMMANDLINE_TOKENS_MAX_EXPANSION,
+                    ),
+                    None,
+                )
+                .result
+                {
+                    ExpandResultCode::error | ExpandResultCode::wildcard_no_match => {
+                        // Hit expansion limit, forward the unexpanded string.
+                        args.push(Completion::from_completion(token_text.to_owned()));
+                    }
+                    ExpandResultCode::cancel => {
+                        return;
+                    }
+                    ExpandResultCode::ok => (),
+                };
+            }
+            TokenMode::Raw => {
+                args.push(Completion::from_completion(token_text.to_owned()));
+            }
+            TokenMode::Unescaped => {
+                let unescaped = unescape_string(
+                    token_text,
+                    UnescapeStringStyle::Script(UnescapeFlags::INCOMPLETE),
+                )
+                .unwrap();
+                args.push(Completion::from_completion(unescaped));
+            }
+        }
+    }
+    for arg in args {
+        streams.out.appendln(arg.completion);
     }
 }
 
@@ -139,7 +185,7 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
     let mut function_mode = false;
     let mut selection_mode = false;
-    let mut tokenize = false;
+    let mut token_mode = None;
     let mut cursor_mode = false;
     let mut selection_start_mode = false;
 
@@ -155,7 +201,7 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
     let ld = parser.libdata();
 
-    const short_options: &wstr = L!(":abijpctforhI:CBELSsP");
+    const short_options: &wstr = L!(":abijpctfxorhI:CBELSsP");
     let long_options: &[woption] = &[
         wopt(L!("append"), woption_argument_t::no_argument, 'a'),
         wopt(L!("insert"), woption_argument_t::no_argument, 'i'),
@@ -171,6 +217,8 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
         wopt(L!("current-token"), woption_argument_t::no_argument, 't'),
         wopt(L!("cut-at-cursor"), woption_argument_t::no_argument, 'c'),
         wopt(L!("function"), woption_argument_t::no_argument, 'f'),
+        wopt(L!("tokens-expanded"), woption_argument_t::no_argument, 'x'),
+        wopt(L!("tokens-raw"), woption_argument_t::no_argument, '\x02'),
         wopt(L!("tokenize"), woption_argument_t::no_argument, 'o'),
         wopt(L!("help"), woption_argument_t::no_argument, 'h'),
         wopt(L!("input"), woption_argument_t::required_argument, 'I'),
@@ -197,7 +245,23 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
             'j' => buffer_part = Some(TextScope::Job),
             'p' => buffer_part = Some(TextScope::Process),
             'f' => function_mode = true,
-            'o' => tokenize = true,
+            'x' | '\x02' | 'o' => {
+                if token_mode.is_some() {
+                    streams.err.append(wgettext_fmt!(
+                        BUILTIN_ERR_COMBO2,
+                        cmd,
+                        wgettext!("--tokens options are mutually exclusive")
+                    ));
+                    builtin_print_error_trailer(parser, streams.err, cmd);
+                    return STATUS_INVALID_ARGS;
+                }
+                token_mode = Some(match c {
+                    'x' => TokenMode::Expanded,
+                    '\x02' => TokenMode::Raw,
+                    'o' => TokenMode::Unescaped,
+                    _ => unreachable!(),
+                })
+            }
             'I' => {
                 // A historical, undocumented feature. TODO: consider removing this.
                 override_buffer = Some(w.woptarg.unwrap().to_owned());
@@ -234,7 +298,7 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
     if buffer_part.is_some()
         || cut_at_cursor
         || append_mode.is_some()
-        || tokenize
+        || token_mode.is_some()
         || cursor_mode
         || line_mode
         || search_mode
@@ -310,7 +374,7 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
         return STATUS_INVALID_ARGS;
     }
 
-    if (buffer_part.is_some() || tokenize || cut_at_cursor)
+    if (buffer_part.is_some() || token_mode.is_some() || cut_at_cursor)
         && (cursor_mode || line_mode || search_mode || paging_mode || paging_full_mode)
         // Special case - we allow to get/set cursor position relative to the process/job/token.
         && (buffer_part.is_none() || !cursor_mode)
@@ -320,11 +384,11 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
         return STATUS_INVALID_ARGS;
     }
 
-    if (tokenize || cut_at_cursor) && positional_args != 0 {
+    if (token_mode.is_some() || cut_at_cursor) && positional_args != 0 {
         streams.err.append(wgettext_fmt!(
             BUILTIN_ERR_COMBO2,
             cmd,
-            "--cut-at-cursor and --tokenize can not be used when setting the commandline"
+            "--cut-at-cursor and --tokens can not be used when setting the commandline"
         ));
         builtin_print_error_trailer(parser, streams.err, cmd);
         return STATUS_INVALID_ARGS;
     }
@@ -475,9 +539,10 @@ pub fn commandline(parser: &Parser, streams: &mut IoStreams, args: &mut [&wstr])
 
     if positional_args == 0 {
         write_part(
+            parser,
             range,
             cut_at_cursor,
-            tokenize,
+            token_mode,
             current_buffer,
             current_cursor_pos,
             streams,
diff --git a/src/expand.rs b/src/expand.rs
index 2305fdbb3..f3848df1f 100644
--- a/src/expand.rs
+++ b/src/expand.rs
@@ -34,6 +34,8 @@ bitflags! {
     pub struct ExpandFlags : u16 {
         /// Fail expansion if there is a command substitution.
         const FAIL_ON_CMDSUBST = 1 << 0;
+        /// Skip command substitutions.
+        const SKIP_CMDSUBST = 1 << 14;
         /// Skip variable expansion.
         const SKIP_VARIABLES = 1 << 1;
         /// Skip wildcard expansion.
@@ -1309,6 +1311,12 @@ impl<'a, 'b, 'c> Expander<'a, 'b, 'c> {
     }
 
     fn stage_cmdsubst(&mut self, input: WString, out: &mut CompletionReceiver) -> ExpandResult {
+        if self.flags.contains(ExpandFlags::SKIP_CMDSUBST) {
+            if !out.add(input) {
+                return append_overflow_error(self.errors, None);
+            }
+            return ExpandResult::ok();
+        }
         if self.flags.contains(ExpandFlags::FAIL_ON_CMDSUBST) {
             let mut cursor = 0;
             let mut start = 0;
diff --git a/tests/checks/complete.fish b/tests/checks/complete.fish
index 5fc76c9ed..80fa5042a 100644
--- a/tests/checks/complete.fish
+++ b/tests/checks/complete.fish
@@ -552,3 +552,44 @@ rm -r $tmpdir
 complete -C'complete --command=mktemp' | string replace -rf '=mktemp\t.*' '=mktemp'
 # (one "--command=" is okay, we used to get "--command=--command="
 # CHECK: --command=mktemp
+
+## Test token expansion in commandline -x
+
+complete complete_make -f -a '(argparse C/directory= -- (commandline -xpc)[2..];
+    echo Completing targets in directory $_flag_C)'
+var=path/to complete -C'complete_make -C "$var/build-directory" '
+# CHECK: Completing targets in directory path/to/build-directory
+var1=path complete -C'var2=to complete_make -C "$var1/$var2/other-build-directory" '
+# CHECK: Completing targets in directory path/to/other-build-directory
+
+complete complete_existing_argument -f -a '(commandline -xpc)[2..]'
+var=a_value complete -C'complete_existing_argument "1 2" $var \'quoted (foo bar)\' unquoted(baz qux) '
+# CHECK: 1 2
+# CHECK: a_value
+# CHECK: quoted (foo bar)
+# CHECK: unquoted(baz qux)
+
+complete complete_first_argument_and_count -f -a '(set -l args (commandline -xpc)[2..]
+    echo (count $args) arguments, first argument is $args[1])'
+list=arg(seq 10) begin
+    complete -C'complete_first_argument_and_count $list$list '
+    # CHECK: 100 arguments, first argument is arg1arg1
+    complete -C'complete_first_argument_and_count $list$list$list '
+    # CHECK: 1 arguments, first argument is $list$list$list
+end
+
+## Test commandline --tokens-raw
+complete complete_raw_tokens -f -ka '(commandline --tokens-raw)'
+complete -C'complete_raw_tokens "foo" bar\\ baz (qux) '
+# CHECK: complete_raw_tokens
+# CHECK: "foo"
+# CHECK: bar\ baz
+# CHECK: (qux)
+
+## Test deprecated commandline -o
+complete complete_unescaped_tokens -f -ka '(commandline -o)'
+complete -C'complete_unescaped_tokens "foo" bar\\ baz (qux) '
+# CHECK: complete_unescaped_tokens
+# CHECK: foo
+# CHECK: bar baz
+# CHECK: (qux)
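
Illustration (not part of the diff): a minimal fish sketch contrasting the three token modes. The command name ``mycmd`` is a placeholder and ``$var`` is assumed to hold ``a_value``; the expected outputs follow the behaviour exercised by the tests above.

    # Sketch only: mycmd is a hypothetical command used for illustration.
    complete mycmd -f -a '(commandline --tokens-expanded --cut-at-cursor)'

    # For the buffer `mycmd "1 2" $var unquoted(baz qux) `, the selected tokens print as:
    #   commandline -x / --tokens-expanded : mycmd, 1 2, a_value, unquoted(baz qux)
    #       (unescaped and expanded; command substitutions are forwarded as-is)
    #   commandline --tokens-raw           : mycmd, "1 2", $var, unquoted(baz qux)
    #       (tokens exactly as typed, no unescaping)
    #   commandline -o (deprecated)        : mycmd, 1 2, $var, unquoted(baz qux)
    #       (unescaped only, no expansion)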