clippy
ShaharNaveh committed Nov 8, 2025
commit 04371d32c733261d5c72d04f31377a4d19a98693
6 changes: 3 additions & 3 deletions stdlib/src/tokenize.rs
@@ -47,7 +47,7 @@ mod _tokenize {
                     .map_err(|_| vm.new_type_error("readline() returned a non-bytes object"))?;
                 vm.state
                     .codec_registry
-                    .decode_text(bytes.into(), &encoding, None, vm)
+                    .decode_text(bytes.into(), encoding, None, vm)
                     .map(|s| s.as_str().to_owned())?
             }
             None => raw_line
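This hunk matches the commit message: dropping the extra `&` on `encoding` is the shape of clippy's needless_borrow suggestion, where a reference is taken only for the compiler to dereference it again at the call site. A minimal sketch of the pattern with placeholder names (`decode` below is illustrative, not the RustPython codec API):

// Placeholder callee standing in for a function that takes a string slice.
fn decode(encoding: &str) -> String {
    format!("decoded with {encoding}")
}

fn main() {
    let encoding: &str = "utf-8";
    // `&encoding` produces a `&&str` that deref coercion immediately collapses
    // back to `&str`, so clippy suggests dropping the extra `&`.
    let with_borrow = decode(&encoding);
    let without = decode(encoding);
    assert_eq!(with_borrow, without);
}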
@@ -109,7 +109,7 @@ mod _tokenize {
             // TODO: Check here for errors. Raise SyntaxError if needed

             if let Some(tok) = state.next_token() {
-                break tok.clone();
+                break tok;
             }

             let nline = zelf.readline(vm)?;
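This change looks like the kind of clone clippy's redundant_clone lint flags: the token is owned at this point and not used again after the break, so it can be moved out of the loop instead of cloned. An illustrative sketch with a made-up `Token` type and token source (not the actual tokenizer state):

#[derive(Clone, Debug)]
struct Token(u32);

// Made-up token source: yields a token after a few calls.
fn next_token(calls: &mut u32) -> Option<Token> {
    *calls += 1;
    (*calls > 2).then(|| Token(*calls))
}

fn main() {
    let mut calls = 0;
    let tok = loop {
        if let Some(tok) = next_token(&mut calls) {
            // `tok` is owned and never touched again inside the loop,
            // so `break tok.clone()` would copy needlessly; moving is enough.
            break tok;
        }
    };
    println!("{tok:?}");
}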
@@ -157,7 +157,7 @@ mod _tokenize {
                 .ctx
                 .new_tuple(vec![
                     token_value.to_pyobject(vm),
-                    vm.ctx.new_str(&*token_repr).into(),
+                    vm.ctx.new_str(token_repr).into(),
                     vm.ctx
                         .new_tuple(vec![start_x.to_pyobject(vm), start_y.to_pyobject(vm)])
                         .into(),
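The final hunk removes a deref-and-reborrow: `&*token_repr` turns the value into a borrowed slice only for the callee to convert it again, so passing `token_repr` directly is simpler, which is what clippy tends to suggest for this shape. A hedged sketch with placeholder types; `new_str` below takes `impl Into<String>` purely for illustration and is not the RustPython signature:

// Placeholder: accepts anything convertible into an owned String.
fn new_str(s: impl Into<String>) -> String {
    s.into()
}

fn main() {
    let token_repr: String = "NAME".to_owned();
    // `&*token_repr` derefs the String to `str` and re-borrows it as `&str`.
    let reborrowed = new_str(&*token_repr);
    // Passing the value directly avoids the `&*` dance (here it moves the String).
    let direct = new_str(token_repr);
    assert_eq!(reborrowed, direct);
}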