Fix some clippies
parent 32ba8d93af
commit d6e231af0d
@@ -321,7 +321,7 @@ pub fn function(
         .collect();

     for named in &opts.named_arguments {
-        if !valid_var_name(&named) {
+        if !valid_var_name(named) {
             streams
                 .err
                 .append(wgettext_fmt!(BUILTIN_ERR_VARNAME, cmd, named));
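The hunk above addresses clippy's needless_borrow lint: `named` is already a reference (iterating `&opts.named_arguments` yields `&String`), so `&named` only adds a layer of indirection that deref coercion strips off again. A minimal standalone sketch of the pattern, with simplified, hypothetical names in place of the fish-shell ones:

// Sketch of the needless_borrow pattern; function and variable names are illustrative only.
fn valid_var_name(name: &str) -> bool {
    !name.is_empty() && name.chars().all(|c| c.is_alphanumeric() || c == '_')
}

fn main() {
    let named_arguments = vec![String::from("foo"), String::from("not valid")];
    for named in &named_arguments {
        // `named` is already a `&String`; writing `&named` would pass a `&&String`
        // that the compiler has to re-dereference, so clippy flags the extra borrow.
        if !valid_var_name(named) {
            eprintln!("invalid variable name: {named}");
        }
    }
}

Both spellings compile; the fix only removes the redundant borrow.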
@@ -291,7 +291,7 @@ impl<'c> Tokenizer<'c> {
     ) -> Self {
         Tokenizer {
             token_cursor: 0,
-            start: start,
+            start,
             has_next: true,
             accept_unfinished: flags & TOK_ACCEPT_UNFINISHED,
             show_comments: flags & TOK_SHOW_COMMENTS,
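The `start: start` change is clippy's redundant_field_names lint: when a local variable and a struct field share a name, Rust's field init shorthand lets the name stand alone. A simplified sketch (the struct below is a stand-in, not the real Tokenizer):

// Stand-in struct; the real Tokenizer has more fields and borrows a wstr.
struct Tokenizer<'c> {
    start: &'c str,
    token_cursor: usize,
    has_next: bool,
}

impl<'c> Tokenizer<'c> {
    fn new(start: &'c str) -> Self {
        Tokenizer {
            token_cursor: 0,
            // Field init shorthand: `start` instead of the redundant `start: start`.
            start,
            has_next: true,
        }
    }
}

fn main() {
    let tok = Tokenizer::new("echo hello");
    assert_eq!(tok.token_cursor, 0);
    assert!(tok.has_next);
    assert_eq!(tok.start, "echo hello");
}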
@@ -328,7 +328,7 @@ impl<'c> Iterator for Tokenizer<'c> {
         while self.start.char_at(self.token_cursor) == '#' {
             // We have a comment, walk over the comment.
             let comment_start = self.token_cursor;
-            self.token_cursor = comment_end(&self.start, self.token_cursor);
+            self.token_cursor = comment_end(self.start, self.token_cursor);
             let comment_len = self.token_cursor - comment_start;

             // If we are going to continue after the comment, skip any trailing newline.
@@ -511,7 +511,7 @@ fn iswspace_not_nl(c: char) -> bool {
 impl<'c> Tokenizer<'c> {
     /// Returns the text of a token, as a string.
     pub fn text_of(&self, tok: &Tok) -> &wstr {
-        tok.get_source(&self.start)
+        tok.get_source(self.start)
     }

     /// Return an error token and mark that we no longer have a next token.
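This hunk and the remaining ones drop a `&` in front of the `start` field. That field is already a shared reference, and shared references are `Copy`, so passing `self.start` by value just copies the reference; `&self.start` would hand over a double reference that gets auto-dereferenced again. A rough sketch with simplified stand-in types (the real code uses `&wstr` and a richer `Tok`):

// Simplified stand-ins for Tok and Tokenizer; offsets and types are illustrative.
struct Tok {
    offset: usize,
    length: usize,
}

impl Tok {
    // Takes the source text by shared reference; `&str` is `Copy`, so a caller
    // can pass a reference-typed field directly, with no extra `&`.
    fn get_source<'a>(&self, source: &'a str) -> &'a str {
        &source[self.offset..self.offset + self.length]
    }
}

struct Tokenizer<'c> {
    start: &'c str,
}

impl<'c> Tokenizer<'c> {
    fn text_of(&self, tok: &Tok) -> &'c str {
        // `self.start` is already a `&str`; `&self.start` would be a `&&str`.
        tok.get_source(self.start)
    }
}

fn main() {
    let t = Tokenizer { start: "echo hello" };
    let tok = Tok { offset: 5, length: 5 };
    assert_eq!(t.text_of(&tok), "hello");
}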
@@ -575,7 +575,7 @@ impl<'c> Tokenizer<'c> {
         this.on_quote_toggle
             .as_mut()
             .map(|cb| (cb)(this.token_cursor));
-        if let Some(end) = quote_end(&this.start, this.token_cursor, quote) {
+        if let Some(end) = quote_end(this.start, this.token_cursor, quote) {
             let mut one_past_end = end + 1;
             if this.start.char_at(end) == '$' {
                 one_past_end = end;
@@ -607,7 +607,7 @@ impl<'c> Tokenizer<'c> {
             else if c == '\\' {
                 mode |= TOK_MODE_CHAR_ESCAPE;
             } else if c == '#' && is_token_begin {
-                self.token_cursor = comment_end(&self.start, self.token_cursor) - 1;
+                self.token_cursor = comment_end(self.start, self.token_cursor) - 1;
             } else if c == '(' {
                 paran_offsets.push(self.token_cursor);
                 expecting.push(')');