This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch 
gh-readonly-queue/main/pr-2178-a7d77634df6da1a5440a0a8cdc22aaa85439ee19
in repository https://gitbox.apache.org/repos/asf/datafusion-sqlparser-rs.git

commit 3c7ecf3dc9fef865788a3f2004cb6987b2b7f60c
Author: Andy Grove <[email protected]>
AuthorDate: Fri Jan 23 12:29:19 2026 -0700

    minor: reduce unnecessary string allocations (#2178)
    
    Co-authored-by: Claude Opus 4.5 <[email protected]>
---
 src/ast/mod.rs    |  4 ++--
 src/parser/mod.rs | 10 ++++------
 src/tokenizer.rs  |  4 ++--
 3 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/src/ast/mod.rs b/src/ast/mod.rs
index 0470d6a8..fcfdf364 100644
--- a/src/ast/mod.rs
+++ b/src/ast/mod.rs
@@ -4931,9 +4931,9 @@ impl fmt::Display for Statement {
                     f,
                     "{tables}{read}{export}",
                     tables = if !tables.is_empty() {
-                        " ".to_string() + &display_comma_separated(tables).to_string()
+                        format!(" {}", display_comma_separated(tables))
                     } else {
-                        "".to_string()
+                        String::new()
                     },
                     export = if *export { " FOR EXPORT" } else { "" },
                     read = if *read_lock { " WITH READ LOCK" } else { "" }
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 0276d058..8d021af8 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -11149,16 +11149,14 @@ impl<'a> Parser<'a> {
     /// Parse a single tab-separated value row used by `COPY` payload parsing.
     pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
         let mut values = vec![];
-        let mut content = String::from("");
+        let mut content = String::new();
         while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
             match t {
                 Token::Whitespace(Whitespace::Tab) => {
-                    values.push(Some(content.to_string()));
-                    content.clear();
+                    values.push(Some(core::mem::take(&mut content)));
                 }
                 Token::Whitespace(Whitespace::Newline) => {
-                    values.push(Some(content.to_string()));
-                    content.clear();
+                    values.push(Some(core::mem::take(&mut content)));
                 }
                 Token::Backslash => {
                     if self.consume_token(&Token::Period) {
@@ -11283,7 +11281,7 @@ impl<'a> Parser<'a> {
                     Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                     _ => self.expected("placeholder", next_token),
                 }?;
-                Ok(Value::Placeholder(tok.to_string() + &ident.value)
+                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                     .with_span(Span::new(span.start, ident.span.end)))
             }
             unexpected => self.expected(
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index a9f9fb44..42fa5b61 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1755,7 +1755,7 @@ impl<'a> Tokenizer<'a> {
                 '?' => {
                     chars.next();
                     let s = peeking_take_while(chars, |ch| ch.is_numeric());
-                    Ok(Some(Token::Placeholder(String::from("?") + &s)))
+                    Ok(Some(Token::Placeholder(format!("?{s}"))))
                 }
 
                 // identifier or keyword
@@ -1904,7 +1904,7 @@ impl<'a> Tokenizer<'a> {
                     }
                 }
             } else {
-                return Ok(Token::Placeholder(String::from("$") + &value));
+                return Ok(Token::Placeholder(format!("${value}")));
             }
         }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to