This is an automated email from the ASF dual-hosted git repository.

liurenjie1024 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg-rust.git


The following commit(s) were added to refs/heads/main by this push:
     new 4eafd2ca2 chore: inline format args (#1805)
4eafd2ca2 is described below

commit 4eafd2ca22e032c3ee4d9e6abe6ff3eae93e0189
Author: Colin Marc <[email protected]>
AuthorDate: Thu Oct 30 15:29:00 2025 +0100

    chore: inline format args (#1805)
    
    The `uninlined_format_args` lint became default in a recent version of
    rust.
    
    I just ran `clippy --fix`.
---
 crates/catalog/glue/src/catalog.rs                 | 24 +++----
 crates/catalog/glue/src/error.rs                   |  4 +-
 crates/catalog/glue/src/schema.rs                  |  6 +-
 crates/catalog/glue/src/utils.rs                   |  5 +-
 crates/catalog/glue/tests/glue_catalog_test.rs     |  4 +-
 crates/catalog/hms/src/catalog.rs                  |  4 +-
 crates/catalog/hms/src/error.rs                    |  4 +-
 crates/catalog/hms/src/schema.rs                   |  6 +-
 crates/catalog/hms/src/utils.rs                    | 12 ++--
 crates/catalog/hms/tests/hms_catalog_test.rs       |  2 +-
 crates/catalog/rest/src/catalog.rs                 |  6 +-
 crates/catalog/rest/tests/rest_catalog_test.rs     |  2 +-
 crates/catalog/s3tables/src/catalog.rs             | 18 +++---
 crates/catalog/sql/src/catalog.rs                  | 49 +++++---------
 crates/catalog/sql/src/error.rs                    |  6 +-
 crates/examples/src/rest_catalog_namespace.rs      |  9 +--
 crates/examples/src/rest_catalog_table.rs          |  2 +-
 .../src/arrow/caching_delete_file_loader.rs        |  2 +-
 crates/iceberg/src/arrow/delete_file_loader.rs     |  2 +-
 .../src/arrow/partition_value_calculator.rs        |  2 +-
 crates/iceberg/src/arrow/reader.rs                 | 22 +++----
 .../src/arrow/record_batch_partition_splitter.rs   |  3 +-
 .../iceberg/src/arrow/record_batch_transformer.rs  |  6 +-
 crates/iceberg/src/arrow/schema.rs                 | 13 ++--
 crates/iceberg/src/arrow/value.rs                  |  6 +-
 crates/iceberg/src/catalog/memory/catalog.rs       | 74 ++++++----------------
 .../iceberg/src/catalog/memory/namespace_state.rs  | 14 ++--
 crates/iceberg/src/catalog/metadata_location.rs    | 11 ++--
 crates/iceberg/src/catalog/mod.rs                  | 10 +--
 crates/iceberg/src/expr/predicate.rs               | 16 ++---
 .../src/expr/visitors/page_index_evaluator.rs      |  9 +--
 .../src/expr/visitors/strict_metrics_evaluator.rs  | 45 +++----------
 crates/iceberg/src/io/object_cache.rs              |  4 +-
 crates/iceberg/src/io/storage.rs                   |  2 +-
 crates/iceberg/src/io/storage_s3.rs                |  5 +-
 crates/iceberg/src/puffin/metadata.rs              | 13 ++--
 crates/iceberg/src/scan/cache.rs                   |  2 +-
 crates/iceberg/src/scan/mod.rs                     | 15 ++---
 crates/iceberg/src/spec/datatypes.rs               | 10 +--
 crates/iceberg/src/spec/manifest/data_file.rs      |  4 +-
 crates/iceberg/src/spec/manifest/entry.rs          |  2 +-
 crates/iceberg/src/spec/manifest_list.rs           |  5 +-
 crates/iceberg/src/spec/partition.rs               |  7 +-
 crates/iceberg/src/spec/schema/id_reassigner.rs    |  4 +-
 crates/iceberg/src/spec/schema/mod.rs              | 11 ++--
 crates/iceberg/src/spec/schema/utils.rs            |  5 +-
 crates/iceberg/src/spec/snapshot.rs                |  2 +-
 crates/iceberg/src/spec/snapshot_summary.rs        |  6 +-
 crates/iceberg/src/spec/table_metadata.rs          | 18 ++----
 crates/iceberg/src/spec/table_metadata_builder.rs  | 13 ++--
 crates/iceberg/src/spec/table_properties.rs        |  2 +-
 crates/iceberg/src/spec/transform.rs               |  2 +-
 crates/iceberg/src/spec/values.rs                  | 50 ++++++---------
 crates/iceberg/src/spec/view_metadata.rs           |  4 +-
 crates/iceberg/src/spec/view_metadata_builder.rs   |  8 +--
 .../iceberg/src/transaction/update_properties.rs   |  5 +-
 crates/iceberg/src/transform/mod.rs                | 10 +--
 crates/iceberg/src/transform/temporal.rs           | 10 +--
 crates/iceberg/src/transform/truncate.rs           |  4 +-
 .../src/writer/base_writer/data_file_writer.rs     |  2 +-
 .../writer/base_writer/equality_delete_writer.rs   |  2 +-
 .../src/writer/file_writer/location_generator.rs   |  4 +-
 .../src/writer/file_writer/parquet_writer.rs       | 11 ++--
 .../src/writer/file_writer/rolling_writer.rs       |  3 +-
 .../src/writer/partitioning/clustered_writer.rs    |  6 +-
 crates/iceberg/tests/file_io_s3_test.rs            |  6 +-
 crates/integration_tests/src/lib.rs                |  2 +-
 .../tests/shared_tests/read_positional_deletes.rs  |  4 +-
 crates/integrations/datafusion/src/error.rs        |  2 +-
 .../datafusion/src/physical_plan/commit.rs         |  3 +-
 .../datafusion/src/physical_plan/scan.rs           |  2 +-
 .../datafusion/src/physical_plan/write.rs          | 11 ++--
 crates/integrations/playground/src/main.rs         |  2 +-
 crates/sqllogictest/src/engine/mod.rs              |  2 +-
 74 files changed, 241 insertions(+), 442 deletions(-)

diff --git a/crates/catalog/glue/src/catalog.rs 
b/crates/catalog/glue/src/catalog.rs
index 9b383df2b..4514f2d7a 100644
--- a/crates/catalog/glue/src/catalog.rs
+++ b/crates/catalog/glue/src/catalog.rs
@@ -293,7 +293,7 @@ impl Catalog for GlueCatalog {
             }
             None => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("Database with name: {} does not exist", db_name),
+                format!("Database with name: {db_name} does not exist"),
             )),
         }
     }
@@ -531,8 +531,7 @@ impl Catalog for GlueCatalog {
             None => Err(Error::new(
                 ErrorKind::TableNotFound,
                 format!(
-                    "Table object for database: {} and table: {} does not 
exist",
-                    db_name, table_name
+                    "Table object for database: {db_name} and table: 
{table_name} does not exist"
                 ),
             )),
             Some(table) => {
@@ -643,8 +642,7 @@ impl Catalog for GlueCatalog {
             None => Err(Error::new(
                 ErrorKind::TableNotFound,
                 format!(
-                    "'Table' object for database: {} and table: {} does not 
exist",
-                    src_db_name, src_table_name
+                    "'Table' object for database: {src_db_name} and table: 
{src_table_name} does not exist"
                 ),
             )),
             Some(table) => {
@@ -672,10 +670,8 @@ impl Catalog for GlueCatalog {
                 match drop_src_table_result {
                     Ok(_) => Ok(()),
                     Err(_) => {
-                        let err_msg_src_table = format!(
-                            "Failed to drop old table {}.{}.",
-                            src_db_name, src_table_name
-                        );
+                        let err_msg_src_table =
+                            format!("Failed to drop old table 
{src_db_name}.{src_table_name}.");
 
                         let drop_dest_table_result = 
self.drop_table(dest).await;
 
@@ -683,15 +679,13 @@ impl Catalog for GlueCatalog {
                             Ok(_) => Err(Error::new(
                                 ErrorKind::Unexpected,
                                 format!(
-                                    "{} Rolled back table creation for {}.{}.",
-                                    err_msg_src_table, dest_db_name, 
dest_table_name
+                                    "{err_msg_src_table} Rolled back table 
creation for {dest_db_name}.{dest_table_name}."
                                 ),
                             )),
                             Err(_) => Err(Error::new(
                                 ErrorKind::Unexpected,
                                 format!(
-                                    "{} Failed to roll back table creation for 
{}.{}. Please clean up manually.",
-                                    err_msg_src_table, dest_db_name, 
dest_table_name
+                                    "{err_msg_src_table} Failed to roll back 
table creation for {dest_db_name}.{dest_table_name}. Please clean up manually."
                                 ),
                             )),
                         }
@@ -753,7 +747,7 @@ impl Catalog for GlueCatalog {
                     format!("Failed to register table {table_ident} due to AWS 
SDK error"),
                 ),
             }
-            .with_source(anyhow!("aws sdk error: {:?}", error))
+            .with_source(anyhow!("aws sdk error: {error:?}"))
         })?;
 
         Ok(Table::builder()
@@ -811,7 +805,7 @@ impl Catalog for GlueCatalog {
                     format!("Operation failed for table: {table_ident} for 
hitting aws sdk error"),
                 ),
             }
-            .with_source(anyhow!("aws sdk error: {:?}", error))
+            .with_source(anyhow!("aws sdk error: {error:?}"))
         })?;
 
         Ok(staged_table)
diff --git a/crates/catalog/glue/src/error.rs b/crates/catalog/glue/src/error.rs
index 741d8c90c..c936ff4ed 100644
--- a/crates/catalog/glue/src/error.rs
+++ b/crates/catalog/glue/src/error.rs
@@ -27,7 +27,7 @@ where T: Debug {
         ErrorKind::Unexpected,
         "Operation failed for hitting aws sdk error".to_string(),
     )
-    .with_source(anyhow!("aws sdk error: {:?}", error))
+    .with_source(anyhow!("aws sdk error: {error:?}"))
 }
 
 /// Format AWS Build error into iceberg error
@@ -36,5 +36,5 @@ pub(crate) fn from_aws_build_error(error: 
aws_sdk_glue::error::BuildError) -> Er
         ErrorKind::Unexpected,
         "Operation failed for hitting aws build error".to_string(),
     )
-    .with_source(anyhow!("aws build error: {:?}", error))
+    .with_source(anyhow!("aws build error: {error:?}"))
 }
diff --git a/crates/catalog/glue/src/schema.rs 
b/crates/catalog/glue/src/schema.rs
index 43918484d..cfd748797 100644
--- a/crates/catalog/glue/src/schema.rs
+++ b/crates/catalog/glue/src/schema.rs
@@ -143,7 +143,7 @@ impl SchemaVisitor for GlueSchemaBuilder {
     }
 
     fn list(&mut self, _list: &iceberg::spec::ListType, value: String) -> 
iceberg::Result<String> {
-        Ok(format!("array<{}>", value))
+        Ok(format!("array<{value}>"))
     }
 
     fn map(
@@ -152,7 +152,7 @@ impl SchemaVisitor for GlueSchemaBuilder {
         key_value: String,
         value: String,
     ) -> iceberg::Result<String> {
-        Ok(format!("map<{},{}>", key_value, value))
+        Ok(format!("map<{key_value},{value}>"))
     }
 
     fn primitive(&mut self, p: &iceberg::spec::PrimitiveType) -> 
iceberg::Result<Self::T> {
@@ -171,7 +171,7 @@ impl SchemaVisitor for GlueSchemaBuilder {
             }
             PrimitiveType::Binary | PrimitiveType::Fixed(_) => 
"binary".to_string(),
             PrimitiveType::Decimal { precision, scale } => {
-                format!("decimal({},{})", precision, scale)
+                format!("decimal({precision},{scale})")
             }
             _ => {
                 return Err(Error::new(
diff --git a/crates/catalog/glue/src/utils.rs b/crates/catalog/glue/src/utils.rs
index d2c21b4d7..457471b34 100644
--- a/crates/catalog/glue/src/utils.rs
+++ b/crates/catalog/glue/src/utils.rs
@@ -185,8 +185,7 @@ pub(crate) fn validate_namespace(namespace: 
&NamespaceIdent) -> Result<String> {
         return Err(Error::new(
             ErrorKind::DataInvalid,
             format!(
-                "Invalid database name: {:?}, hierarchical namespaces are not 
supported",
-                namespace
+                "Invalid database name: {namespace:?}, hierarchical namespaces 
are not supported"
             ),
         ));
     }
@@ -236,7 +235,7 @@ pub(crate) fn get_metadata_location(
             Some(location) => Ok(location.to_string()),
             None => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("No '{}' set on table", METADATA_LOCATION),
+                format!("No '{METADATA_LOCATION}' set on table"),
             )),
         },
         None => Err(Error::new(
diff --git a/crates/catalog/glue/tests/glue_catalog_test.rs 
b/crates/catalog/glue/tests/glue_catalog_test.rs
index 548fc4f0f..491703214 100644
--- a/crates/catalog/glue/tests/glue_catalog_test.rs
+++ b/crates/catalog/glue/tests/glue_catalog_test.rs
@@ -91,7 +91,7 @@ async fn get_catalog() -> GlueCatalog {
         (AWS_REGION_NAME.to_string(), "us-east-1".to_string()),
         (
             S3_ENDPOINT.to_string(),
-            format!("http://{}", minio_socket_addr),
+            format!("http://{minio_socket_addr}"),
         ),
         (S3_ACCESS_KEY_ID.to_string(), "admin".to_string()),
         (S3_SECRET_ACCESS_KEY.to_string(), "password".to_string()),
@@ -119,7 +119,7 @@ async fn get_catalog() -> GlueCatalog {
     let mut glue_props = HashMap::from([
         (
             GLUE_CATALOG_PROP_URI.to_string(),
-            format!("http://{}", glue_socket_addr),
+            format!("http://{glue_socket_addr}"),
         ),
         (
             GLUE_CATALOG_PROP_WAREHOUSE.to_string(),
diff --git a/crates/catalog/hms/src/catalog.rs 
b/crates/catalog/hms/src/catalog.rs
index c8f046cb7..b7d192210 100644
--- a/crates/catalog/hms/src/catalog.rs
+++ b/crates/catalog/hms/src/catalog.rs
@@ -321,7 +321,7 @@ impl Catalog for HmsCatalog {
                 ErrorKind::Unexpected,
                 "Operation failed for hitting thrift error".to_string(),
             )
-            .with_source(anyhow!("thrift error: {:?}", exception))),
+            .with_source(anyhow!("thrift error: {exception:?}"))),
             Err(err) => Err(from_thrift_error(err)),
         }
     }
@@ -554,7 +554,7 @@ impl Catalog for HmsCatalog {
                 ErrorKind::Unexpected,
                 "Operation failed for hitting thrift error".to_string(),
             )
-            .with_source(anyhow!("thrift error: {:?}", exception))),
+            .with_source(anyhow!("thrift error: {exception:?}"))),
             Err(err) => Err(from_thrift_error(err)),
         }
     }
diff --git a/crates/catalog/hms/src/error.rs b/crates/catalog/hms/src/error.rs
index 15da3eaf6..ad3cca070 100644
--- a/crates/catalog/hms/src/error.rs
+++ b/crates/catalog/hms/src/error.rs
@@ -30,7 +30,7 @@ pub fn from_thrift_error(error: impl std::error::Error) -> 
Error {
         ErrorKind::Unexpected,
         "Operation failed for hitting thrift error".to_string(),
     )
-    .with_source(anyhow!("thrift error: {:?}", error))
+    .with_source(anyhow!("thrift error: {error:?}"))
 }
 
 /// Format a thrift exception into iceberg error.
@@ -41,7 +41,7 @@ pub fn from_thrift_exception<T, E: Debug>(value: 
MaybeException<T, E>) -> Result
             ErrorKind::Unexpected,
             "Operation failed for hitting thrift error".to_string(),
         )
-        .with_source(anyhow!("thrift error: {:?}", err))),
+        .with_source(anyhow!("thrift error: {err:?}"))),
     }
 }
 
diff --git a/crates/catalog/hms/src/schema.rs b/crates/catalog/hms/src/schema.rs
index 6b7f71c6a..8893a8052 100644
--- a/crates/catalog/hms/src/schema.rs
+++ b/crates/catalog/hms/src/schema.rs
@@ -100,7 +100,7 @@ impl SchemaVisitor for HiveSchemaBuilder {
     }
 
     fn list(&mut self, _list: &iceberg::spec::ListType, value: String) -> 
iceberg::Result<String> {
-        Ok(format!("array<{}>", value))
+        Ok(format!("array<{value}>"))
     }
 
     fn map(
@@ -109,7 +109,7 @@ impl SchemaVisitor for HiveSchemaBuilder {
         key_value: String,
         value: String,
     ) -> iceberg::Result<String> {
-        Ok(format!("map<{},{}>", key_value, value))
+        Ok(format!("map<{key_value},{value}>"))
     }
 
     fn primitive(&mut self, p: &iceberg::spec::PrimitiveType) -> 
iceberg::Result<String> {
@@ -128,7 +128,7 @@ impl SchemaVisitor for HiveSchemaBuilder {
             }
             PrimitiveType::Binary | PrimitiveType::Fixed(_) => 
"binary".to_string(),
             PrimitiveType::Decimal { precision, scale } => {
-                format!("decimal({},{})", precision, scale)
+                format!("decimal({precision},{scale})")
             }
             _ => {
                 return Err(Error::new(
diff --git a/crates/catalog/hms/src/utils.rs b/crates/catalog/hms/src/utils.rs
index 32c28b82e..096e792f6 100644
--- a/crates/catalog/hms/src/utils.rs
+++ b/crates/catalog/hms/src/utils.rs
@@ -128,7 +128,7 @@ pub(crate) fn convert_to_database(
                     _ => {
                         return Err(Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Invalid value for setting 'owner_type': 
{}", v),
+                            format!("Invalid value for setting 'owner_type': 
{v}"),
                         ));
                     }
                 };
@@ -214,8 +214,7 @@ pub(crate) fn validate_namespace(namespace: 
&NamespaceIdent) -> Result<String> {
         return Err(Error::new(
             ErrorKind::DataInvalid,
             format!(
-                "Invalid database name: {:?}, hierarchical namespaces are not 
supported",
-                namespace
+                "Invalid database name: {namespace:?}, hierarchical namespaces 
are not supported"
             ),
         ));
     }
@@ -257,7 +256,7 @@ pub(crate) fn get_metadata_location(
             Some(location) => Ok(location.to_string()),
             None => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("No '{}' set on table", METADATA_LOCATION),
+                format!("No '{METADATA_LOCATION}' set on table"),
             )),
         },
         None => Err(Error::new(
@@ -272,7 +271,7 @@ fn format_location_uri(location: String) -> String {
     let mut location = location;
 
     if !location.starts_with('/') {
-        location = format!("/{}", location);
+        location = format!("/{location}");
     }
 
     if location.ends_with('/') && location.len() > 1 {
@@ -292,8 +291,7 @@ fn validate_owner_settings(properties: &HashMap<String, 
String>) -> Result<()> {
         return Err(Error::new(
             ErrorKind::DataInvalid,
             format!(
-                "Setting '{}' without setting '{}' is not allowed",
-                HMS_DB_OWNER_TYPE, HMS_DB_OWNER
+                "Setting '{HMS_DB_OWNER_TYPE}' without setting 
'{HMS_DB_OWNER}' is not allowed"
             ),
         ));
     }
diff --git a/crates/catalog/hms/tests/hms_catalog_test.rs 
b/crates/catalog/hms/tests/hms_catalog_test.rs
index 2bf12779b..9793b7f73 100644
--- a/crates/catalog/hms/tests/hms_catalog_test.rs
+++ b/crates/catalog/hms/tests/hms_catalog_test.rs
@@ -96,7 +96,7 @@ async fn get_catalog() -> HmsCatalog {
         ),
         (
             S3_ENDPOINT.to_string(),
-            format!("http://{}", minio_socket_addr),
+            format!("http://{minio_socket_addr}"),
         ),
         (S3_ACCESS_KEY_ID.to_string(), "admin".to_string()),
         (S3_SECRET_ACCESS_KEY.to_string(), "password".to_string()),
diff --git a/crates/catalog/rest/src/catalog.rs 
b/crates/catalog/rest/src/catalog.rs
index 6cb4d93b9..39553f755 100644
--- a/crates/catalog/rest/src/catalog.rs
+++ b/crates/catalog/rest/src/catalog.rs
@@ -243,7 +243,7 @@ impl RestCatalogConfig {
             ),
             (
                 header::USER_AGENT,
-                HeaderValue::from_str(&format!("iceberg-rs/{}", 
CARGO_PKG_VERSION)).unwrap(),
+                
HeaderValue::from_str(&format!("iceberg-rs/{CARGO_PKG_VERSION}")).unwrap(),
             ),
         ]);
 
@@ -1273,7 +1273,7 @@ mod tests {
             ),
             (
                 header::USER_AGENT,
-                HeaderValue::from_str(&format!("iceberg-rs/{}", 
CARGO_PKG_VERSION)).unwrap(),
+                
HeaderValue::from_str(&format!("iceberg-rs/{CARGO_PKG_VERSION}")).unwrap(),
             ),
         ]);
         assert_eq!(headers, expected_headers);
@@ -1310,7 +1310,7 @@ mod tests {
             ),
             (
                 header::USER_AGENT,
-                HeaderValue::from_str(&format!("iceberg-rs/{}", 
CARGO_PKG_VERSION)).unwrap(),
+                
HeaderValue::from_str(&format!("iceberg-rs/{CARGO_PKG_VERSION}")).unwrap(),
             ),
             (
                 HeaderName::from_static("customized-header"),
diff --git a/crates/catalog/rest/tests/rest_catalog_test.rs 
b/crates/catalog/rest/tests/rest_catalog_test.rs
index 7ce76ec34..59fea0b51 100644
--- a/crates/catalog/rest/tests/rest_catalog_test.rs
+++ b/crates/catalog/rest/tests/rest_catalog_test.rs
@@ -72,7 +72,7 @@ async fn get_catalog() -> RestCatalog {
             "rest",
             HashMap::from([(
                 REST_CATALOG_PROP_URI.to_string(),
-                format!("http://{}", rest_socket_addr),
+                format!("http://{rest_socket_addr}"),
             )]),
         )
         .await
diff --git a/crates/catalog/s3tables/src/catalog.rs 
b/crates/catalog/s3tables/src/catalog.rs
index daa659055..3606fac99 100644
--- a/crates/catalog/s3tables/src/catalog.rs
+++ b/crates/catalog/s3tables/src/catalog.rs
@@ -637,7 +637,7 @@ impl Catalog for S3TablesCatalog {
                     "Operation failed for hitting aws sdk error",
                 ),
             }
-            .with_source(anyhow::Error::msg(format!("aws sdk error: {:?}", 
error)))
+            .with_source(anyhow::Error::msg(format!("aws sdk error: 
{error:?}")))
         })?;
 
         Ok(staged_table)
@@ -649,7 +649,7 @@ pub(crate) fn from_aws_sdk_error<T>(error: 
aws_sdk_s3tables::error::SdkError<T>)
 where T: std::fmt::Debug {
     Error::new(
         ErrorKind::Unexpected,
-        format!("Operation failed for hitting aws sdk error: {:?}", error),
+        format!("Operation failed for hitting aws sdk error: {error:?}"),
     )
 }
 
@@ -682,7 +682,7 @@ mod tests {
         let catalog = match load_s3tables_catalog_from_env().await {
             Ok(Some(catalog)) => catalog,
             Ok(None) => return,
-            Err(e) => panic!("Error loading catalog: {}", e),
+            Err(e) => panic!("Error loading catalog: {e}"),
         };
 
         let namespaces = catalog.list_namespaces(None).await.unwrap();
@@ -694,7 +694,7 @@ mod tests {
         let catalog = match load_s3tables_catalog_from_env().await {
             Ok(Some(catalog)) => catalog,
             Ok(None) => return,
-            Err(e) => panic!("Error loading catalog: {}", e),
+            Err(e) => panic!("Error loading catalog: {e}"),
         };
 
         let tables = catalog
@@ -709,7 +709,7 @@ mod tests {
         let catalog = match load_s3tables_catalog_from_env().await {
             Ok(Some(catalog)) => catalog,
             Ok(None) => return,
-            Err(e) => panic!("Error loading catalog: {}", e),
+            Err(e) => panic!("Error loading catalog: {e}"),
         };
 
         let table = catalog
@@ -719,7 +719,7 @@ mod tests {
             ))
             .await
             .unwrap();
-        println!("{:?}", table);
+        println!("{table:?}");
     }
 
     #[tokio::test]
@@ -727,7 +727,7 @@ mod tests {
         let catalog = match load_s3tables_catalog_from_env().await {
             Ok(Some(catalog)) => catalog,
             Ok(None) => return,
-            Err(e) => panic!("Error loading catalog: {}", e),
+            Err(e) => panic!("Error loading catalog: {e}"),
         };
 
         let namespace = 
NamespaceIdent::new("test_s3tables_create_delete_namespace".to_string());
@@ -745,7 +745,7 @@ mod tests {
         let catalog = match load_s3tables_catalog_from_env().await {
             Ok(Some(catalog)) => catalog,
             Ok(None) => return,
-            Err(e) => panic!("Error loading catalog: {}", e),
+            Err(e) => panic!("Error loading catalog: {e}"),
         };
 
         let creation = {
@@ -788,7 +788,7 @@ mod tests {
         let catalog = match load_s3tables_catalog_from_env().await {
             Ok(Some(catalog)) => catalog,
             Ok(None) => return,
-            Err(e) => panic!("Error loading catalog: {}", e),
+            Err(e) => panic!("Error loading catalog: {e}"),
         };
 
         // Create a test namespace and table
diff --git a/crates/catalog/sql/src/catalog.rs 
b/crates/catalog/sql/src/catalog.rs
index b6dc4f835..77b35a228 100644
--- a/crates/catalog/sql/src/catalog.rs
+++ b/crates/catalog/sql/src/catalog.rs
@@ -410,7 +410,7 @@ impl Catalog for SqlCatalog {
         if exists {
             return Err(Error::new(
                 iceberg::ErrorKind::Unexpected,
-                format!("Namespace {:?} already exists", namespace),
+                format!("Namespace {namespace:?} already exists"),
             ));
         }
 
@@ -1608,7 +1608,7 @@ mod tests {
 
         assert_eq!(
             err.message(),
-            format!("No such namespace: {:?}", namespace_ident)
+            format!("No such namespace: {namespace_ident:?}")
         );
     }
 
@@ -1630,7 +1630,7 @@ mod tests {
 
         assert_eq!(
             err.message(),
-            format!("No such namespace: {:?}", namespace_ident)
+            format!("No such namespace: {namespace_ident:?}")
         );
     }
 
@@ -1714,10 +1714,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "Unexpected => No such namespace: {:?}",
-                non_existent_namespace_ident
-            )
+            format!("Unexpected => No such namespace: 
{non_existent_namespace_ident:?}")
         )
     }
 
@@ -1735,10 +1732,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "Unexpected => No such namespace: {:?}",
-                non_existent_namespace_ident
-            )
+            format!("Unexpected => No such namespace: 
{non_existent_namespace_ident:?}")
         )
     }
 
@@ -1785,10 +1779,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "Unexpected => No such namespace: {:?}",
-                non_existent_namespace_ident
-            ),
+            format!("Unexpected => No such namespace: 
{non_existent_namespace_ident:?}"),
         );
     }
 
@@ -1850,10 +1841,8 @@ mod tests {
 
         let table_name = "tbl1";
         let expected_table_ident = TableIdent::new(namespace_ident.clone(), 
table_name.into());
-        let expected_table_metadata_location_regex = format!(
-            "^{}/tbl1/metadata/00000-{}.metadata.json$",
-            namespace_location, UUID_REGEX_STR,
-        );
+        let expected_table_metadata_location_regex =
+            
format!("^{namespace_location}/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$",);
 
         let table = catalog
             .create_table(
@@ -1908,8 +1897,7 @@ mod tests {
         let expected_table_ident =
             TableIdent::new(nested_namespace_ident.clone(), table_name.into());
         let expected_table_metadata_location_regex = format!(
-            "^{}/tbl1/metadata/00000-{}.metadata.json$",
-            nested_namespace_location, UUID_REGEX_STR,
+            
"^{nested_namespace_location}/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$",
         );
 
         let table = catalog
@@ -1947,10 +1935,8 @@ mod tests {
 
         let table_name = "tbl1";
         let expected_table_ident = TableIdent::new(namespace_ident.clone(), 
table_name.into());
-        let expected_table_metadata_location_regex = format!(
-            "^{}/a/tbl1/metadata/00000-{}.metadata.json$",
-            warehouse_loc, UUID_REGEX_STR
-        );
+        let expected_table_metadata_location_regex =
+            
format!("^{warehouse_loc}/a/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$");
 
         let table = catalog
             .create_table(
@@ -1986,10 +1972,8 @@ mod tests {
         let table_name = "tbl1";
         let expected_table_ident =
             TableIdent::new(nested_namespace_ident.clone(), table_name.into());
-        let expected_table_metadata_location_regex = format!(
-            "^{}/a/b/tbl1/metadata/00000-{}.metadata.json$",
-            warehouse_loc, UUID_REGEX_STR
-        );
+        let expected_table_metadata_location_regex =
+            
format!("^{warehouse_loc}/a/b/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$");
 
         let table = catalog
             .create_table(
@@ -2152,10 +2136,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "Unexpected => No such namespace: {:?}",
-                non_existent_dst_namespace_ident
-            ),
+            format!("Unexpected => No such namespace: 
{non_existent_dst_namespace_ident:?}"),
         );
     }
 
@@ -2174,7 +2155,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!("Unexpected => No such table: {:?}", src_table_ident),
+            format!("Unexpected => No such table: {src_table_ident:?}"),
         );
     }
 
diff --git a/crates/catalog/sql/src/error.rs b/crates/catalog/sql/src/error.rs
index 15b56e8e2..a08f75559 100644
--- a/crates/catalog/sql/src/error.rs
+++ b/crates/catalog/sql/src/error.rs
@@ -29,20 +29,20 @@ pub fn from_sqlx_error(error: sqlx::Error) -> Error {
 pub fn no_such_namespace_err<T>(namespace: &NamespaceIdent) -> Result<T> {
     Err(Error::new(
         ErrorKind::Unexpected,
-        format!("No such namespace: {:?}", namespace),
+        format!("No such namespace: {namespace:?}"),
     ))
 }
 
 pub fn no_such_table_err<T>(table_ident: &TableIdent) -> Result<T> {
     Err(Error::new(
         ErrorKind::Unexpected,
-        format!("No such table: {:?}", table_ident),
+        format!("No such table: {table_ident:?}"),
     ))
 }
 
 pub fn table_already_exists_err<T>(table_ident: &TableIdent) -> Result<T> {
     Err(Error::new(
         ErrorKind::Unexpected,
-        format!("Table {:?} already exists.", table_ident),
+        format!("Table {table_ident:?} already exists."),
     ))
 }
diff --git a/crates/examples/src/rest_catalog_namespace.rs 
b/crates/examples/src/rest_catalog_namespace.rs
index 27196a269..d871de438 100644
--- a/crates/examples/src/rest_catalog_namespace.rs
+++ b/crates/examples/src/rest_catalog_namespace.rs
@@ -45,10 +45,7 @@ async fn main() {
     // ANCHOR: list_all_namespace
     // List all namespaces already in the catalog.
     let existing_namespaces = catalog.list_namespaces(None).await.unwrap();
-    println!(
-        "Namespaces alreading in the existing catalog: {:?}",
-        existing_namespaces
-    );
+    println!("Namespaces alreading in the existing catalog: 
{existing_namespaces:?}");
     // ANCHOR_END: list_all_namespace
 
     // ANCHOR: create_namespace
@@ -70,9 +67,9 @@ async fn main() {
         )
         .await
         .unwrap();
-    println!("Namespace {:?} created!", namespace_ident);
+    println!("Namespace {namespace_ident:?} created!");
 
     let loaded_namespace = 
catalog.get_namespace(&namespace_ident).await.unwrap();
-    println!("Namespace loaded!\n\nNamespace: {:#?}", loaded_namespace,);
+    println!("Namespace loaded!\n\nNamespace: {loaded_namespace:#?}",);
     // ANCHOR_END: create_namespace
 }
diff --git a/crates/examples/src/rest_catalog_table.rs 
b/crates/examples/src/rest_catalog_table.rs
index 306d886ae..64acb263f 100644
--- a/crates/examples/src/rest_catalog_table.rs
+++ b/crates/examples/src/rest_catalog_table.rs
@@ -96,6 +96,6 @@ async fn main() {
 
     // Load the table back from the catalog. It should be identical to the 
created table.
     let loaded_table = catalog.load_table(&table_ident).await.unwrap();
-    println!("Table {TABLE_NAME} loaded!\n\nTable: {:?}", loaded_table);
+    println!("Table {TABLE_NAME} loaded!\n\nTable: {loaded_table:?}");
     // ANCHOR_END: load_table
 }
diff --git a/crates/iceberg/src/arrow/caching_delete_file_loader.rs 
b/crates/iceberg/src/arrow/caching_delete_file_loader.rs
index 9cf605680..77f29b7f1 100644
--- a/crates/iceberg/src/arrow/caching_delete_file_loader.rs
+++ b/crates/iceberg/src/arrow/caching_delete_file_loader.rs
@@ -568,7 +568,7 @@ mod tests {
         )
         .await
         .expect("error parsing batch stream");
-        println!("{}", parsed_eq_delete);
+        println!("{parsed_eq_delete}");
 
         let expected = "((((y != 1) OR (z != 100)) OR (a != \"HELP\")) OR (sa 
!= 4)) AND ((((y != 2) OR (z IS NOT NULL)) OR (a IS NOT NULL)) OR (sa != 
5))".to_string();
 
diff --git a/crates/iceberg/src/arrow/delete_file_loader.rs 
b/crates/iceberg/src/arrow/delete_file_loader.rs
index 592ef2eb4..54fc4a58a 100644
--- a/crates/iceberg/src/arrow/delete_file_loader.rs
+++ b/crates/iceberg/src/arrow/delete_file_loader.rs
@@ -66,7 +66,7 @@ impl BasicDeleteFileLoader {
         )
         .await?
         .build()?
-        .map_err(|e| Error::new(ErrorKind::Unexpected, format!("{}", e)));
+        .map_err(|e| Error::new(ErrorKind::Unexpected, format!("{e}")));
 
         Ok(Box::pin(record_batch_stream) as ArrowRecordBatchStream)
     }
diff --git a/crates/iceberg/src/arrow/partition_value_calculator.rs 
b/crates/iceberg/src/arrow/partition_value_calculator.rs
index 140950345..3520f75ac 100644
--- a/crates/iceberg/src/arrow/partition_value_calculator.rs
+++ b/crates/iceberg/src/arrow/partition_value_calculator.rs
@@ -159,7 +159,7 @@ impl PartitionValueCalculator {
             .map_err(|e| {
                 Error::new(
                     ErrorKind::DataInvalid,
-                    format!("Failed to create partition struct array: {}", e),
+                    format!("Failed to create partition struct array: {e}"),
                 )
             })?;
 
diff --git a/crates/iceberg/src/arrow/reader.rs 
b/crates/iceberg/src/arrow/reader.rs
index c6f5af2f2..3c18c0337 100644
--- a/crates/iceberg/src/arrow/reader.rs
+++ b/crates/iceberg/src/arrow/reader.rs
@@ -799,8 +799,7 @@ fn build_field_id_map(parquet_schema: &SchemaDescriptor) -> 
Result<HashMap<i32,
                 return Err(Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "Leave column in schema should be primitive type but 
got {:?}",
-                        field_type
+                        "Leave column in schema should be primitive type but 
got {field_type:?}"
                     ),
                 ));
             }
@@ -1786,7 +1785,7 @@ message schema {
                 start: 0,
                 length: 0,
                 record_count: None,
-                data_file_path: format!("{}/1.parquet", table_location),
+                data_file_path: format!("{table_location}/1.parquet"),
                 data_file_format: DataFileFormat::Parquet,
                 schema: schema.clone(),
                 project_field_ids: vec![1],
@@ -2151,21 +2150,16 @@ message schema {
             .unwrap();
 
         let total_rows_task2: usize = result2.iter().map(|b| 
b.num_rows()).sum();
-        println!(
-            "Task 2 (bytes {}-{}) returned {} rows",
-            rg1_start, file_end, total_rows_task2
-        );
+        println!("Task 2 (bytes {rg1_start}-{file_end}) returned 
{total_rows_task2} rows");
 
         assert_eq!(
             total_rows_task1, 100,
-            "Task 1 should read only the first row group (100 rows), but got 
{} rows",
-            total_rows_task1
+            "Task 1 should read only the first row group (100 rows), but got 
{total_rows_task1} rows"
         );
 
         assert_eq!(
             total_rows_task2, 200,
-            "Task 2 should read only the second+third row groups (200 rows), 
but got {} rows",
-            total_rows_task2
+            "Task 2 should read only the second+third row groups (200 rows), 
but got {total_rows_task2} rows"
         );
 
         // Verify the actual data values are correct (not just the row count)
@@ -2176,7 +2170,7 @@ message schema {
                 .as_primitive::<arrow_array::types::Int32Type>();
             let first_val = id_col.value(0);
             let last_val = id_col.value(id_col.len() - 1);
-            println!("Task 1 data range: {} to {}", first_val, last_val);
+            println!("Task 1 data range: {first_val} to {last_val}");
 
             assert_eq!(first_val, 0, "Task 1 should start with id=0");
             assert_eq!(last_val, 99, "Task 1 should end with id=99");
@@ -2188,7 +2182,7 @@ message schema {
                 .column(0)
                 .as_primitive::<arrow_array::types::Int32Type>();
             let first_val = id_col.value(0);
-            println!("Task 2 first value: {}", first_val);
+            println!("Task 2 first value: {first_val}");
 
             assert_eq!(first_val, 100, "Task 2 should start with id=100, not 
id=0");
         }
@@ -2246,7 +2240,7 @@ message schema {
                 start: 0,
                 length: 0,
                 record_count: None,
-                data_file_path: format!("{}/old_file.parquet", table_location),
+                data_file_path: format!("{table_location}/old_file.parquet"),
                 data_file_format: DataFileFormat::Parquet,
                 schema: new_schema.clone(),
                 project_field_ids: vec![1, 2], // Request both columns 'a' and 
'b'
diff --git a/crates/iceberg/src/arrow/record_batch_partition_splitter.rs 
b/crates/iceberg/src/arrow/record_batch_partition_splitter.rs
index 66371fac1..2508a0032 100644
--- a/crates/iceberg/src/arrow/record_batch_partition_splitter.rs
+++ b/crates/iceberg/src/arrow/record_batch_partition_splitter.rs
@@ -149,8 +149,7 @@ impl RecordBatchPartitionSplitter {
                     Error::new(
                         ErrorKind::DataInvalid,
                         format!(
-                            "Partition column '{}' not found in batch",
-                            PROJECTED_PARTITION_VALUE_COLUMN
+                            "Partition column 
'{PROJECTED_PARTITION_VALUE_COLUMN}' not found in batch"
                         ),
                     )
                 })?;
diff --git a/crates/iceberg/src/arrow/record_batch_transformer.rs 
b/crates/iceberg/src/arrow/record_batch_transformer.rs
index 71fe59dea..5fbbbb106 100644
--- a/crates/iceberg/src/arrow/record_batch_transformer.rs
+++ b/crates/iceberg/src/arrow/record_batch_transformer.rs
@@ -307,7 +307,7 @@ impl RecordBatchTransformer {
                     let Literal::Primitive(primitive_literal) = 
iceberg_default_value else {
                         return Err(Error::new(
                             ErrorKind::Unexpected,
-                            format!("Default value for column must be 
primitive type, but encountered {:?}", iceberg_default_value)
+                            format!("Default value for column must be 
primitive type, but encountered {iceberg_default_value:?}")
                         ));
                     };
                     Some(primitive_literal.clone())
@@ -341,7 +341,7 @@ impl RecordBatchTransformer {
                 .map_err(|e| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("field id not parseable as an i32: {}", e),
+                        format!("field id not parseable as an i32: {e}"),
                     )
                 })?;
 
@@ -447,7 +447,7 @@ impl RecordBatchTransformer {
             (dt, _) => {
                 return Err(Error::new(
                     ErrorKind::Unexpected,
-                    format!("unexpected target column type {}", dt),
+                    format!("unexpected target column type {dt}"),
                 ));
             }
         })
diff --git a/crates/iceberg/src/arrow/schema.rs 
b/crates/iceberg/src/arrow/schema.rs
index c214821dd..e10db3a5a 100644
--- a/crates/iceberg/src/arrow/schema.rs
+++ b/crates/iceberg/src/arrow/schema.rs
@@ -701,10 +701,7 @@ pub(crate) fn get_arrow_datum(datum: &Datum) -> 
Result<Arc<dyn ArrowDatum + Send
 
         (primitive_type, _) => Err(Error::new(
             ErrorKind::FeatureUnsupported,
-            format!(
-                "Converting datum from type {:?} to arrow not supported yet.",
-                primitive_type
-            ),
+            format!("Converting datum from type {primitive_type:?} to arrow 
not supported yet."),
         )),
     }
 }
@@ -789,7 +786,7 @@ pub(crate) fn get_parquet_stat_min_as_datum(
                 
PrimitiveLiteral::Int128(unscaled_value.to_i128().ok_or_else(|| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Can't convert bytes to i128: {:?}", bytes),
+                        format!("Can't convert bytes to i128: {bytes:?}"),
                     )
                 })?),
             ))
@@ -936,7 +933,7 @@ pub(crate) fn get_parquet_stat_max_as_datum(
                 
PrimitiveLiteral::Int128(unscaled_value.to_i128().ok_or_else(|| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Can't convert bytes to i128: {:?}", bytes),
+                        format!("Can't convert bytes to i128: {bytes:?}"),
                     )
                 })?),
             ))
@@ -1745,9 +1742,7 @@ mod tests {
 
             assert!(
                 matches!(iceberg_field.field_type.as_ref(), Type::Primitive(t) 
if *t == expected_iceberg_type),
-                "Expected {:?} to map to {:?}",
-                arrow_type,
-                expected_iceberg_type
+                "Expected {arrow_type:?} to map to {expected_iceberg_type:?}"
             );
         }
 
diff --git a/crates/iceberg/src/arrow/value.rs 
b/crates/iceberg/src/arrow/value.rs
index cc3a561d5..f1cf225bb 100644
--- a/crates/iceberg/src/arrow/value.rs
+++ b/crates/iceberg/src/arrow/value.rs
@@ -263,8 +263,7 @@ impl SchemaWithPartnerVisitor<ArrayRef> for 
ArrowArrayToIcebergStructConverter {
                         return Err(Error::new(
                             ErrorKind::DataInvalid,
                             format!(
-                                "The precision or scale ({},{}) of arrow 
decimal128 array is not compatible with iceberg decimal type ({},{})",
-                                arrow_precision, arrow_scale, precision, scale
+                                "The precision or scale 
({arrow_precision},{arrow_scale}) of arrow decimal128 array is not compatible 
with iceberg decimal type ({precision},{scale})"
                             ),
                         ));
                     }
@@ -505,8 +504,7 @@ impl PartnerAccessor<ArrayRef> for ArrowArrayAccessor {
                 Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "The struct partner is not a struct array, partner: 
{:?}",
-                        struct_partner
+                        "The struct partner is not a struct array, partner: 
{struct_partner:?}"
                     ),
                 )
             })?;
diff --git a/crates/iceberg/src/catalog/memory/catalog.rs 
b/crates/iceberg/src/catalog/memory/catalog.rs
index 6ce311141..fdb495f6d 100644
--- a/crates/iceberg/src/catalog/memory/catalog.rs
+++ b/crates/iceberg/src/catalog/memory/catalog.rs
@@ -506,9 +506,7 @@ mod tests {
         let regex = Regex::new(regex_str).unwrap();
         assert!(
             regex.is_match(&actual),
-            "Expected metadata location to match regex, but got location: {} 
and regex: {}",
-            actual,
-            regex
+            "Expected metadata location to match regex, but got location: 
{actual} and regex: {regex}"
         )
     }
 
@@ -888,10 +886,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            )
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}")
         )
     }
 
@@ -975,10 +970,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            )
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}")
         )
     }
 
@@ -1058,10 +1050,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            )
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}")
         )
     }
 
@@ -1078,10 +1067,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            )
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}")
         )
     }
 
@@ -1168,10 +1154,8 @@ mod tests {
 
         let table_name = "tbl1";
         let expected_table_ident = TableIdent::new(namespace_ident.clone(), 
table_name.into());
-        let expected_table_metadata_location_regex = format!(
-            "^{}/tbl1/metadata/00000-{}.metadata.json$",
-            namespace_location, UUID_REGEX_STR,
-        );
+        let expected_table_metadata_location_regex =
+            
format!("^{namespace_location}/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$",);
 
         let table = catalog
             .create_table(
@@ -1230,8 +1214,7 @@ mod tests {
         let expected_table_ident =
             TableIdent::new(nested_namespace_ident.clone(), table_name.into());
         let expected_table_metadata_location_regex = format!(
-            "^{}/tbl1/metadata/00000-{}.metadata.json$",
-            nested_namespace_location, UUID_REGEX_STR,
+            
"^{nested_namespace_location}/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$",
         );
 
         let table = catalog
@@ -1278,10 +1261,8 @@ mod tests {
 
         let table_name = "tbl1";
         let expected_table_ident = TableIdent::new(namespace_ident.clone(), 
table_name.into());
-        let expected_table_metadata_location_regex = format!(
-            "^{}/a/tbl1/metadata/00000-{}.metadata.json$",
-            warehouse_location, UUID_REGEX_STR
-        );
+        let expected_table_metadata_location_regex =
+            
format!("^{warehouse_location}/a/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$");
 
         let table = catalog
             .create_table(
@@ -1335,8 +1316,7 @@ mod tests {
         let expected_table_ident =
             TableIdent::new(nested_namespace_ident.clone(), table_name.into());
         let expected_table_metadata_location_regex = format!(
-            "^{}/a/b/tbl1/metadata/00000-{}.metadata.json$",
-            warehouse_location, UUID_REGEX_STR
+            
"^{warehouse_location}/a/b/tbl1/metadata/00000-{UUID_REGEX_STR}.metadata.json$"
         );
 
         let table = catalog
@@ -1499,10 +1479,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            ),
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}"),
         );
     }
 
@@ -1549,10 +1526,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            ),
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}"),
         );
     }
 
@@ -1570,10 +1544,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "TableNotFound => No such table: {:?}",
-                non_existent_table_ident
-            ),
+            format!("TableNotFound => No such table: 
{non_existent_table_ident:?}"),
         );
     }
 
@@ -1638,10 +1609,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_namespace_ident
-            ),
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_namespace_ident:?}"),
         );
     }
 
@@ -1753,10 +1721,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_src_namespace_ident
-            ),
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_src_namespace_ident:?}"),
         );
     }
 
@@ -1777,10 +1742,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!(
-                "NamespaceNotFound => No such namespace: {:?}",
-                non_existent_dst_namespace_ident
-            ),
+            format!("NamespaceNotFound => No such namespace: 
{non_existent_dst_namespace_ident:?}"),
         );
     }
 
@@ -1798,7 +1760,7 @@ mod tests {
                 .await
                 .unwrap_err()
                 .to_string(),
-            format!("TableNotFound => No such table: {:?}", src_table_ident),
+            format!("TableNotFound => No such table: {src_table_ident:?}"),
         );
     }
 
diff --git a/crates/iceberg/src/catalog/memory/namespace_state.rs 
b/crates/iceberg/src/catalog/memory/namespace_state.rs
index 2fc481b76..d7dd6c4b2 100644
--- a/crates/iceberg/src/catalog/memory/namespace_state.rs
+++ b/crates/iceberg/src/catalog/memory/namespace_state.rs
@@ -36,34 +36,28 @@ pub(crate) struct NamespaceState {
 fn no_such_namespace_err<T>(namespace_ident: &NamespaceIdent) -> Result<T> {
     Err(Error::new(
         ErrorKind::NamespaceNotFound,
-        format!("No such namespace: {:?}", namespace_ident),
+        format!("No such namespace: {namespace_ident:?}"),
     ))
 }
 
 fn no_such_table_err<T>(table_ident: &TableIdent) -> Result<T> {
     Err(Error::new(
         ErrorKind::TableNotFound,
-        format!("No such table: {:?}", table_ident),
+        format!("No such table: {table_ident:?}"),
     ))
 }
 
 fn namespace_already_exists_err<T>(namespace_ident: &NamespaceIdent) -> 
Result<T> {
     Err(Error::new(
         ErrorKind::NamespaceAlreadyExists,
-        format!(
-            "Cannot create namespace {:?}. Namespace already exists.",
-            namespace_ident
-        ),
+        format!("Cannot create namespace {namespace_ident:?}. Namespace 
already exists."),
     ))
 }
 
 fn table_already_exists_err<T>(table_ident: &TableIdent) -> Result<T> {
     Err(Error::new(
         ErrorKind::TableAlreadyExists,
-        format!(
-            "Cannot create table {:?}. Table already exists.",
-            table_ident
-        ),
+        format!("Cannot create table {table_ident:?}. Table already exists."),
     ))
 }
 
diff --git a/crates/iceberg/src/catalog/metadata_location.rs 
b/crates/iceberg/src/catalog/metadata_location.rs
index 8cb5cb11d..3705ee42d 100644
--- a/crates/iceberg/src/catalog/metadata_location.rs
+++ b/crates/iceberg/src/catalog/metadata_location.rs
@@ -53,10 +53,7 @@ impl MetadataLocation {
     fn parse_metadata_path_prefix(path: &str) -> Result<String> {
         let prefix = path.strip_suffix("/metadata").ok_or(Error::new(
             ErrorKind::Unexpected,
-            format!(
-                "Metadata location not under \"/metadata\" subdirectory: {}",
-                path
-            ),
+            format!("Metadata location not under \"/metadata\" subdirectory: 
{path}"),
         ))?;
 
         Ok(prefix.to_string())
@@ -68,12 +65,12 @@ impl MetadataLocation {
             .strip_suffix(".metadata.json")
             .ok_or(Error::new(
                 ErrorKind::Unexpected,
-                format!("Invalid metadata file ending: {}", file_name),
+                format!("Invalid metadata file ending: {file_name}"),
             ))?
             .split_once('-')
             .ok_or(Error::new(
                 ErrorKind::Unexpected,
-                format!("Invalid metadata file name format: {}", file_name),
+                format!("Invalid metadata file name format: {file_name}"),
             ))?;
 
         Ok((version.parse::<i32>()?, Uuid::parse_str(id)?))
@@ -96,7 +93,7 @@ impl FromStr for MetadataLocation {
     fn from_str(s: &str) -> Result<Self> {
         let (path, file_name) = s.rsplit_once('/').ok_or(Error::new(
             ErrorKind::Unexpected,
-            format!("Invalid metadata location: {}", s),
+            format!("Invalid metadata location: {s}"),
         ))?;
 
         let prefix = Self::parse_metadata_path_prefix(path)?;
diff --git a/crates/iceberg/src/catalog/mod.rs 
b/crates/iceberg/src/catalog/mod.rs
index 77b171ee3..ec4b77fe0 100644
--- a/crates/iceberg/src/catalog/mod.rs
+++ b/crates/iceberg/src/catalog/mod.rs
@@ -662,7 +662,7 @@ impl TableRequirement {
                         let snapshot_ref = snapshot_ref.ok_or(
                             Error::new(
                                 ErrorKind::CatalogCommitConflicts,
-                                format!("Requirement failed: Branch or tag 
`{}` not found", r#ref),
+                                format!("Requirement failed: Branch or tag 
`{ref}` not found"),
                             )
                             .with_retryable(true),
                         )?;
@@ -670,8 +670,7 @@ impl TableRequirement {
                             return Err(Error::new(
                                 ErrorKind::CatalogCommitConflicts,
                                 format!(
-                                    "Requirement failed: Branch or tag `{}`'s 
snapshot has changed",
-                                    r#ref
+                                    "Requirement failed: Branch or tag 
`{ref}`'s snapshot has changed"
                                 ),
                             )
                             .with_context("expected", snapshot_id.to_string())
@@ -682,10 +681,7 @@ impl TableRequirement {
                         // a null snapshot ID means the ref should not exist 
already
                         return Err(Error::new(
                             ErrorKind::CatalogCommitConflicts,
-                            format!(
-                                "Requirement failed: Branch or tag `{}` 
already exists",
-                                r#ref
-                            ),
+                            format!("Requirement failed: Branch or tag `{ref}` 
already exists"),
                         )
                         .with_retryable(true));
                     }
diff --git a/crates/iceberg/src/expr/predicate.rs 
b/crates/iceberg/src/expr/predicate.rs
index 123cfada5..35e88ff3e 100644
--- a/crates/iceberg/src/expr/predicate.rs
+++ b/crates/iceberg/src/expr/predicate.rs
@@ -54,7 +54,7 @@ impl<'de, T: Deserialize<'de>, const N: usize> 
Deserialize<'de> for LogicalExpre
         let inputs = Vec::<Box<T>>::deserialize(deserializer)?;
         Ok(LogicalExpression::new(
             array_init::from_iter(inputs.into_iter()).ok_or_else(|| {
-                serde::de::Error::custom(format!("Failed to deserialize 
LogicalExpression: the len of inputs is not match with the len of 
LogicalExpression {}",N))
+                serde::de::Error::custom(format!("Failed to deserialize 
LogicalExpression: the len of inputs is not match with the len of 
LogicalExpression {N}"))
             })?,
         ))
     }
@@ -310,7 +310,7 @@ impl<T: Bind> Bind for SetExpression<T> {
 
 impl<T: Display + Debug> Display for SetExpression<T> {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        let mut literal_strs = self.literals.iter().map(|l| format!("{}", l));
+        let mut literal_strs = self.literals.iter().map(|l| format!("{l}"));
 
         write!(f, "{} {} ({})", self.term, self.op, literal_strs.join(", "))
     }
@@ -529,13 +529,13 @@ impl Display for Predicate {
                 write!(f, "NOT ({})", expr.inputs()[0])
             }
             Predicate::Unary(expr) => {
-                write!(f, "{}", expr)
+                write!(f, "{expr}")
             }
             Predicate::Binary(expr) => {
-                write!(f, "{}", expr)
+                write!(f, "{expr}")
             }
             Predicate::Set(expr) => {
-                write!(f, "{}", expr)
+                write!(f, "{expr}")
             }
         }
     }
@@ -805,13 +805,13 @@ impl Display for BoundPredicate {
                 write!(f, "NOT ({})", expr.inputs()[0])
             }
             BoundPredicate::Unary(expr) => {
-                write!(f, "{}", expr)
+                write!(f, "{expr}")
             }
             BoundPredicate::Binary(expr) => {
-                write!(f, "{}", expr)
+                write!(f, "{expr}")
             }
             BoundPredicate::Set(expr) => {
-                write!(f, "{}", expr)
+                write!(f, "{expr}")
             }
         }
     }
diff --git a/crates/iceberg/src/expr/visitors/page_index_evaluator.rs 
b/crates/iceberg/src/expr/visitors/page_index_evaluator.rs
index ea56c32c6..3745d94d1 100644
--- a/crates/iceberg/src/expr/visitors/page_index_evaluator.rs
+++ b/crates/iceberg/src/expr/visitors/page_index_evaluator.rs
@@ -150,17 +150,14 @@ impl<'a> PageIndexEvaluator<'a> {
         let Some(field) = self.snapshot_schema.field_by_id(field_id) else {
             return Err(Error::new(
                 ErrorKind::Unexpected,
-                format!("Field with id {} missing from snapshot schema", 
field_id),
+                format!("Field with id {field_id} missing from snapshot 
schema"),
             ));
         };
 
         let Some(field_type) = field.field_type.as_primitive_type() else {
             return Err(Error::new(
                 ErrorKind::Unexpected,
-                format!(
-                    "Field with id {} not convertible to primitive type",
-                    field_id
-                ),
+                format!("Field with id {field_id} not convertible to primitive 
type"),
             ));
         };
 
@@ -180,7 +177,7 @@ impl<'a> PageIndexEvaluator<'a> {
                         // if we have a column index, we should always have an 
offset index.
                         return Err(Error::new(
                             ErrorKind::Unexpected,
-                            format!("Missing offset index for field id {}", 
field_id),
+                            format!("Missing offset index for field id 
{field_id}"),
                         ));
                     };
 
diff --git a/crates/iceberg/src/expr/visitors/strict_metrics_evaluator.rs 
b/crates/iceberg/src/expr/visitors/strict_metrics_evaluator.rs
index e17c44c65..e9bed775e 100644
--- a/crates/iceberg/src/expr/visitors/strict_metrics_evaluator.rs
+++ b/crates/iceberg/src/expr/visitors/strict_metrics_evaluator.rs
@@ -1045,8 +1045,7 @@ mod test {
             // For zero-record files, strict eval returns MUST_MATCH.
             assert!(
                 result,
-                "Strict eval: Should read zero-record file for expression 
{:?}",
-                expr
+                "Strict eval: Should read zero-record file for expression 
{expr:?}"
             );
         }
     }
@@ -1142,11 +1141,7 @@ mod test {
 
         let result =
             StrictMetricsEvaluator::eval(&less_than_int("id", INT_MIN_VALUE), 
&file).unwrap();
-        assert!(
-            !result,
-            "Strict eval: id < {} should be false",
-            INT_MIN_VALUE
-        );
+        assert!(!result, "Strict eval: id < {INT_MIN_VALUE} should be false");
 
         let result =
             StrictMetricsEvaluator::eval(&less_than_int("id", INT_MIN_VALUE + 
1), &file).unwrap();
@@ -1158,11 +1153,7 @@ mod test {
 
         let result =
             StrictMetricsEvaluator::eval(&less_than_int("id", INT_MAX_VALUE), 
&file).unwrap();
-        assert!(
-            !result,
-            "Strict eval: id < {} should be false",
-            INT_MAX_VALUE
-        );
+        assert!(!result, "Strict eval: id < {INT_MAX_VALUE} should be false");
 
         let result =
             StrictMetricsEvaluator::eval(&less_than_int("id", INT_MAX_VALUE + 
1), &file).unwrap();
@@ -1200,18 +1191,13 @@ mod test {
                 .unwrap();
         assert!(
             !result,
-            "Strict eval: id <= {} should be false",
-            INT_MIN_VALUE
+            "Strict eval: id <= {INT_MIN_VALUE} should be false"
         );
 
         let result =
             StrictMetricsEvaluator::eval(&less_than_or_equal_int("id", 
INT_MAX_VALUE), &file)
                 .unwrap();
-        assert!(
-            result,
-            "Strict eval: id <= {} should be true",
-            INT_MAX_VALUE
-        );
+        assert!(result, "Strict eval: id <= {INT_MAX_VALUE} should be true");
 
         let result =
             StrictMetricsEvaluator::eval(&less_than_or_equal_int("id", 
INT_MAX_VALUE + 1), &file)
@@ -1238,19 +1224,11 @@ mod test {
 
         let result =
             StrictMetricsEvaluator::eval(&greater_than_int("id", 
INT_MAX_VALUE), &file).unwrap();
-        assert!(
-            !result,
-            "Strict eval: id > {} should be false",
-            INT_MAX_VALUE
-        );
+        assert!(!result, "Strict eval: id > {INT_MAX_VALUE} should be false");
 
         let result =
             StrictMetricsEvaluator::eval(&greater_than_int("id", 
INT_MIN_VALUE), &file).unwrap();
-        assert!(
-            !result,
-            "Strict eval: id > {} should be false",
-            INT_MIN_VALUE
-        );
+        assert!(!result, "Strict eval: id > {INT_MIN_VALUE} should be false");
 
         let result =
             StrictMetricsEvaluator::eval(&greater_than_int("id", INT_MIN_VALUE 
- 1), &file)
@@ -1302,18 +1280,13 @@ mod test {
                 .unwrap();
         assert!(
             !result,
-            "Strict eval: id >= {} should be false",
-            INT_MAX_VALUE
+            "Strict eval: id >= {INT_MAX_VALUE} should be false"
         );
 
         let result =
             StrictMetricsEvaluator::eval(&greater_than_or_equal_int("id", 
INT_MIN_VALUE), &file)
                 .unwrap();
-        assert!(
-            result,
-            "Strict eval: id >= {} should be true",
-            INT_MIN_VALUE
-        );
+        assert!(result, "Strict eval: id >= {INT_MIN_VALUE} should be true");
 
         let result = StrictMetricsEvaluator::eval(
             &greater_than_or_equal_int("id", INT_MIN_VALUE - 1),
diff --git a/crates/iceberg/src/io/object_cache.rs 
b/crates/iceberg/src/io/object_cache.rs
index a23ff36b3..cb9f03fa5 100644
--- a/crates/iceberg/src/io/object_cache.rs
+++ b/crates/iceberg/src/io/object_cache.rs
@@ -113,7 +113,7 @@ impl ObjectCache {
             CachedItem::Manifest(arc_manifest) => Ok(arc_manifest),
             _ => Err(Error::new(
                 ErrorKind::Unexpected,
-                format!("cached object for key '{:?}' is not a Manifest", key),
+                format!("cached object for key '{key:?}' is not a Manifest"),
             )),
         }
     }
@@ -157,7 +157,7 @@ impl ObjectCache {
             CachedItem::ManifestList(arc_manifest_list) => 
Ok(arc_manifest_list),
             _ => Err(Error::new(
                 ErrorKind::Unexpected,
-                format!("cached object for path '{:?}' is not a manifest 
list", key),
+                format!("cached object for path '{key:?}' is not a manifest 
list"),
             )),
         }
     }
diff --git a/crates/iceberg/src/io/storage.rs b/crates/iceberg/src/io/storage.rs
index 3de4f10db..d5f2ad8fa 100644
--- a/crates/iceberg/src/io/storage.rs
+++ b/crates/iceberg/src/io/storage.rs
@@ -162,7 +162,7 @@ impl Storage {
                 } else {
                     Err(Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Invalid s3 url: {}, should start with {}", 
path, prefix),
+                        format!("Invalid s3 url: {path}, should start with 
{prefix}"),
                     ))
                 }
             }
diff --git a/crates/iceberg/src/io/storage_s3.rs 
b/crates/iceberg/src/io/storage_s3.rs
index f2408331c..fcf9afed1 100644
--- a/crates/iceberg/src/io/storage_s3.rs
+++ b/crates/iceberg/src/io/storage_s3.rs
@@ -127,8 +127,7 @@ pub(crate) fn s3_config_parse(mut m: HashMap<String, 
String>) -> Result<S3Config
                 return Err(Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "Invalid {}: {}. Expected one of (custom, kms, s3, 
none)",
-                        S3_SSE_TYPE, sse_type
+                        "Invalid {S3_SSE_TYPE}: {sse_type}. Expected one of 
(custom, kms, s3, none)"
                     ),
                 ));
             }
@@ -164,7 +163,7 @@ pub(crate) fn s3_config_build(
     let bucket = url.host_str().ok_or_else(|| {
         Error::new(
             ErrorKind::DataInvalid,
-            format!("Invalid s3 url: {}, missing bucket", path),
+            format!("Invalid s3 url: {path}, missing bucket"),
         )
     })?;
 
diff --git a/crates/iceberg/src/puffin/metadata.rs 
b/crates/iceberg/src/puffin/metadata.rs
index 6039c7f82..15a8e9b33 100644
--- a/crates/iceberg/src/puffin/metadata.rs
+++ b/crates/iceberg/src/puffin/metadata.rs
@@ -121,10 +121,7 @@ impl Flag {
         } else {
             Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Unknown flag byte {} and bit {} combination",
-                    byte_idx, bit_idx
-                ),
+                format!("Unknown flag byte {byte_idx} and bit {bit_idx} 
combination"),
             ))
         }
     }
@@ -864,15 +861,14 @@ mod tests {
                     "blobs" : [
                         {{
                             "type" : "type-a",
-                            "fields" : [ {} ],
+                            "fields" : [ {out_of_i32_range_number} ],
                             "snapshot-id" : 14,
                             "sequence-number" : 3,
                             "offset" : 4,
                             "length" : 16
                         }}
                     ]
-                }}"#,
-                out_of_i32_range_number
+                }}"#
             ),
         )
         .await;
@@ -883,8 +879,7 @@ mod tests {
                 .unwrap_err()
                 .to_string(),
             format!(
-                "DataInvalid => Given string is not valid JSON, source: 
invalid value: integer `{}`, expected i32 at line 5 column 51",
-                out_of_i32_range_number
+                "DataInvalid => Given string is not valid JSON, source: 
invalid value: integer `{out_of_i32_range_number}`, expected i32 at line 5 
column 51"
             ),
         )
     }
diff --git a/crates/iceberg/src/scan/cache.rs b/crates/iceberg/src/scan/cache.rs
index fc7e559e6..66242e389 100644
--- a/crates/iceberg/src/scan/cache.rs
+++ b/crates/iceberg/src/scan/cache.rs
@@ -66,7 +66,7 @@ impl PartitionFilterCache {
             .partition_spec_by_id(spec_id)
             .ok_or(Error::new(
                 ErrorKind::Unexpected,
-                format!("Could not find partition spec for id {}", spec_id),
+                format!("Could not find partition spec for id {spec_id}"),
             ))?;
 
         let partition_type = partition_spec.partition_type(schema)?;
diff --git a/crates/iceberg/src/scan/mod.rs b/crates/iceberg/src/scan/mod.rs
index 3d14b3cce..92ee2018c 100644
--- a/crates/iceberg/src/scan/mod.rs
+++ b/crates/iceberg/src/scan/mod.rs
@@ -193,7 +193,7 @@ impl<'a> TableScanBuilder<'a> {
                 .ok_or_else(|| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Snapshot with id {} not found", snapshot_id),
+                        format!("Snapshot with id {snapshot_id} not found"),
                     )
                 })?
                 .clone(),
@@ -223,10 +223,7 @@ impl<'a> TableScanBuilder<'a> {
                 if schema.field_by_name(column_name).is_none() {
                     return Err(Error::new(
                         ErrorKind::DataInvalid,
-                        format!(
-                            "Column {} not found in table. Schema: {}",
-                            column_name, schema
-                        ),
+                        format!("Column {column_name} not found in table. 
Schema: {schema}"),
                     ));
                 }
             }
@@ -246,10 +243,7 @@ impl<'a> TableScanBuilder<'a> {
             let field_id = schema.field_id_by_name(column_name).ok_or_else(|| {
                 Error::new(
                     ErrorKind::DataInvalid,
-                    format!(
-                        "Column {} not found in table. Schema: {}",
-                        column_name, schema
-                    ),
+                    format!("Column {column_name} not found in table. Schema: 
{schema}"),
                 )
             })?;
 
@@ -260,8 +254,7 @@ impl<'a> TableScanBuilder<'a> {
                     Error::new(
                         ErrorKind::FeatureUnsupported,
                         format!(
-                            "Column {} is not a direct child of schema but a 
nested field, which is not supported now. Schema: {}",
-                            column_name, schema
+                            "Column {column_name} is not a direct child of 
schema but a nested field, which is not supported now. Schema: {schema}"
                         ),
                     )
                 })?;
diff --git a/crates/iceberg/src/spec/datatypes.rs 
b/crates/iceberg/src/spec/datatypes.rs
index 7c03001f8..19facfb01 100644
--- a/crates/iceberg/src/spec/datatypes.rs
+++ b/crates/iceberg/src/spec/datatypes.rs
@@ -95,8 +95,8 @@ pub enum Type {
 impl fmt::Display for Type {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Type::Primitive(primitive) => write!(f, "{}", primitive),
-            Type::Struct(s) => write!(f, "{}", s),
+            Type::Primitive(primitive) => write!(f, "{primitive}"),
+            Type::Struct(s) => write!(f, "{s}"),
             Type::List(_) => write!(f, "list"),
             Type::Map(_) => write!(f, "map"),
         }
@@ -370,7 +370,7 @@ impl fmt::Display for PrimitiveType {
             PrimitiveType::Float => write!(f, "float"),
             PrimitiveType::Double => write!(f, "double"),
             PrimitiveType::Decimal { precision, scale } => {
-                write!(f, "decimal({},{})", precision, scale)
+                write!(f, "decimal({precision},{scale})")
             }
             PrimitiveType::Date => write!(f, "date"),
             PrimitiveType::Time => write!(f, "time"),
@@ -380,7 +380,7 @@ impl fmt::Display for PrimitiveType {
             PrimitiveType::TimestamptzNs => write!(f, "timestamptz_ns"),
             PrimitiveType::String => write!(f, "string"),
             PrimitiveType::Uuid => write!(f, "uuid"),
-            PrimitiveType::Fixed(size) => write!(f, "fixed({})", size),
+            PrimitiveType::Fixed(size) => write!(f, "fixed({size})"),
             PrimitiveType::Binary => write!(f, "binary"),
         }
     }
@@ -673,7 +673,7 @@ impl fmt::Display for NestedField {
         }
         write!(f, "{} ", self.field_type)?;
         if let Some(doc) = &self.doc {
-            write!(f, "{}", doc)?;
+            write!(f, "{doc}")?;
         }
         Ok(())
     }
diff --git a/crates/iceberg/src/spec/manifest/data_file.rs 
b/crates/iceberg/src/spec/manifest/data_file.rs
index d7455b56f..6c63f6223 100644
--- a/crates/iceberg/src/spec/manifest/data_file.rs
+++ b/crates/iceberg/src/spec/manifest/data_file.rs
@@ -360,7 +360,7 @@ impl TryFrom<i32> for DataContentType {
             2 => Ok(DataContentType::EqualityDeletes),
             _ => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("data content type {} is invalid", v),
+                format!("data content type {v} is invalid"),
             )),
         }
     }
@@ -390,7 +390,7 @@ impl FromStr for DataFileFormat {
             "puffin" => Ok(Self::Puffin),
             _ => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("Unsupported data file format: {}", s),
+                format!("Unsupported data file format: {s}"),
             )),
         }
     }
diff --git a/crates/iceberg/src/spec/manifest/entry.rs 
b/crates/iceberg/src/spec/manifest/entry.rs
index 7ba9efb3b..d11d8acfe 100644
--- a/crates/iceberg/src/spec/manifest/entry.rs
+++ b/crates/iceberg/src/spec/manifest/entry.rs
@@ -172,7 +172,7 @@ impl TryFrom<i32> for ManifestStatus {
             2 => Ok(ManifestStatus::Deleted),
             _ => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("manifest status {} is invalid", v),
+                format!("manifest status {v} is invalid"),
             )),
         }
     }
diff --git a/crates/iceberg/src/spec/manifest_list.rs 
b/crates/iceberg/src/spec/manifest_list.rs
index 43808bb24..16409ffe5 100644
--- a/crates/iceberg/src/spec/manifest_list.rs
+++ b/crates/iceberg/src/spec/manifest_list.rs
@@ -642,10 +642,7 @@ impl TryFrom<i32> for ManifestContentType {
             1 => Ok(ManifestContentType::Deletes),
             _ => Err(Error::new(
                 crate::ErrorKind::DataInvalid,
-                format!(
-                    "Invalid manifest content type. Expected 0 or 1, got {}",
-                    value
-                ),
+                format!("Invalid manifest content type. Expected 0 or 1, got 
{value}"),
             )),
         }
     }
diff --git a/crates/iceberg/src/spec/partition.rs 
b/crates/iceberg/src/spec/partition.rs
index b594895cc..255aabd47 100644
--- a/crates/iceberg/src/spec/partition.rs
+++ b/crates/iceberg/src/spec/partition.rs
@@ -683,7 +683,7 @@ trait CorePartitionSpecValidator {
         if self.fields().iter().any(|f| f.name == name) {
             return Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("Cannot use partition name more than once: {}", name),
+                format!("Cannot use partition name more than once: {name}"),
             ));
         }
         Ok(())
@@ -715,10 +715,7 @@ trait CorePartitionSpecValidator {
         if self.fields().iter().any(|f| f.field_id == Some(field_id)) {
             return Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Cannot use field id more than once in one PartitionSpec: 
{}",
-                    field_id
-                ),
+                format!("Cannot use field id more than once in one 
PartitionSpec: {field_id}"),
             ));
         }
 
diff --git a/crates/iceberg/src/spec/schema/id_reassigner.rs 
b/crates/iceberg/src/spec/schema/id_reassigner.rs
index 044383185..5dbb37000 100644
--- a/crates/iceberg/src/spec/schema/id_reassigner.rs
+++ b/crates/iceberg/src/spec/schema/id_reassigner.rs
@@ -122,7 +122,7 @@ impl ReassignFieldIds {
                 self.old_to_new_id.get(&id).copied().ok_or_else(|| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Identifier Field ID {} not found", id),
+                        format!("Identifier Field ID {id} not found"),
                     )
                 })
             })
@@ -142,7 +142,7 @@ impl ReassignFieldIds {
                     .ok_or_else(|| {
                         Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Field with id {} for alias {} not found", 
id, name),
+                            format!("Field with id {id} for alias {name} not 
found"),
                         )
                     })
                     .map(|new_id| (name, new_id))
diff --git a/crates/iceberg/src/spec/schema/mod.rs 
b/crates/iceberg/src/spec/schema/mod.rs
index 655462998..7080b6e70 100644
--- a/crates/iceberg/src/spec/schema/mod.rs
+++ b/crates/iceberg/src/spec/schema/mod.rs
@@ -425,7 +425,7 @@ impl Display for Schema {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         writeln!(f, "table {{")?;
         for field in self.as_struct().fields() {
-            writeln!(f, "  {}", field)?;
+            writeln!(f, "  {field}")?;
         }
         writeln!(f, "}}")
     }
@@ -731,8 +731,7 @@ table {
             assert_eq!(
                 Some(name),
                 schema.name_by_field_id(id),
-                "Column name for field id {} not match.",
-                id
+                "Column name for field id {id} not match."
             );
         }
     }
@@ -754,8 +753,7 @@ table {
             assert_eq!(
                 Some(name),
                 schema.name_by_field_id(id),
-                "Column name for field id {} not match.",
-                id
+                "Column name for field id {id} not match."
             );
         }
     }
@@ -956,8 +954,7 @@ table {
             assert_eq!(
                 Some(&field),
                 schema.field_by_id(id).map(|f| f.as_ref()),
-                "Field for {} not match.",
-                id
+                "Field for {id} not match."
             );
         }
     }
diff --git a/crates/iceberg/src/spec/schema/utils.rs 
b/crates/iceberg/src/spec/schema/utils.rs
index 37e683344..34ee39b09 100644
--- a/crates/iceberg/src/spec/schema/utils.rs
+++ b/crates/iceberg/src/spec/schema/utils.rs
@@ -25,10 +25,7 @@ pub fn try_insert_field<V>(map: &mut HashMap<i32, V>, 
field_id: i32, value: V) -
         |_| {
             Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Found duplicate 'field.id' {}. Field ids must be unique.",
-                    field_id
-                ),
+                format!("Found duplicate 'field.id' {field_id}. Field ids must 
be unique."),
             ))
         },
     )
diff --git a/crates/iceberg/src/spec/snapshot.rs 
b/crates/iceberg/src/spec/snapshot.rs
index 04a9e15b3..809bf0998 100644
--- a/crates/iceberg/src/spec/snapshot.rs
+++ b/crates/iceberg/src/spec/snapshot.rs
@@ -166,7 +166,7 @@ impl Snapshot {
                 .ok_or_else(|| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Schema with id {} not found", schema_id),
+                        format!("Schema with id {schema_id} not found"),
                     )
                 })?
                 .clone(),
diff --git a/crates/iceberg/src/spec/snapshot_summary.rs 
b/crates/iceberg/src/spec/snapshot_summary.rs
index 1b07ce3fa..a9dd56998 100644
--- a/crates/iceberg/src/spec/snapshot_summary.rs
+++ b/crates/iceberg/src/spec/snapshot_summary.rs
@@ -824,9 +824,9 @@ mod tests {
         assert!(props.contains_key(&partition_key));
 
         let partition_summary = props.get(&partition_key).unwrap();
-        assert!(partition_summary.contains(&format!("{}=200", 
ADDED_FILE_SIZE)));
-        assert!(partition_summary.contains(&format!("{}=1", 
ADDED_DATA_FILES)));
-        assert!(partition_summary.contains(&format!("{}=20", ADDED_RECORDS)));
+        assert!(partition_summary.contains(&format!("{ADDED_FILE_SIZE}=200")));
+        assert!(partition_summary.contains(&format!("{ADDED_DATA_FILES}=1")));
+        assert!(partition_summary.contains(&format!("{ADDED_RECORDS}=20")));
     }
 
     #[test]
diff --git a/crates/iceberg/src/spec/table_metadata.rs 
b/crates/iceberg/src/spec/table_metadata.rs
index ca298f308..a98dc4f43 100644
--- a/crates/iceberg/src/spec/table_metadata.rs
+++ b/crates/iceberg/src/spec/table_metadata.rs
@@ -315,7 +315,7 @@ impl TableMetadata {
     pub fn snapshot_for_ref(&self, ref_name: &str) -> Option<&SnapshotRef> {
         self.refs.get(ref_name).map(|r| {
             self.snapshot_by_id(r.snapshot_id)
-                .unwrap_or_else(|| panic!("Snapshot id of ref {} doesn't 
exist", ref_name))
+                .unwrap_or_else(|| panic!("Snapshot id of ref {ref_name} 
doesn't exist"))
         })
     }
 
@@ -513,8 +513,7 @@ impl TableMetadata {
                 return Err(Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "Snapshot for current snapshot id {} does not exist in 
the existing snapshots list",
-                        current_snapshot_id
+                        "Snapshot for current snapshot id 
{current_snapshot_id} does not exist in the existing snapshots list"
                     ),
                 ));
             }
@@ -956,10 +955,7 @@ pub(super) mod _serde {
                         .ok_or_else(|| {
                             Error::new(
                                 ErrorKind::DataInvalid,
-                                format!(
-                                    "No schema exists with the current schema 
id {}.",
-                                    schema_id
-                                ),
+                                format!("No schema exists with the current 
schema id {schema_id}."),
                             )
                         })?
                         .clone();
@@ -1346,7 +1342,7 @@ mod tests {
     }
 
     fn get_test_table_metadata(file_name: &str) -> TableMetadata {
-        let path = format!("testdata/table_metadata/{}", file_name);
+        let path = format!("testdata/table_metadata/{file_name}");
         let metadata: String = fs::read_to_string(path).unwrap();
 
         serde_json::from_str(&metadata).unwrap()
@@ -2118,7 +2114,6 @@ mod tests {
     "#;
 
         let err = serde_json::from_str::<TableMetadata>(data).unwrap_err();
-        println!("{}", err);
         assert!(err.to_string().contains(
             "Invalid snapshot with id 3055729675574597004 and sequence number 
4 greater than last sequence number 1"
         ));
@@ -2805,8 +2800,7 @@ mod tests {
         let error_message = desered.unwrap_err().to_string();
         assert!(
             error_message.contains("No valid schema configuration found"),
-            "Expected error about no valid schema configuration, got: {}",
-            error_message
+            "Expected error about no valid schema configuration, got: 
{error_message}"
         );
     }
 
@@ -3033,7 +3027,7 @@ mod tests {
         let original_metadata: TableMetadata = 
get_test_table_metadata("TableMetadataV2Valid.json");
 
         // Define the metadata location
-        let metadata_location = format!("{}/metadata.json", temp_path);
+        let metadata_location = format!("{temp_path}/metadata.json");
 
         // Write the metadata
         original_metadata
diff --git a/crates/iceberg/src/spec/table_metadata_builder.rs 
b/crates/iceberg/src/spec/table_metadata_builder.rs
index 7881ebea4..25af8c30e 100644
--- a/crates/iceberg/src/spec/table_metadata_builder.rs
+++ b/crates/iceberg/src/spec/table_metadata_builder.rs
@@ -653,10 +653,7 @@ impl TableMetadataBuilder {
         let _schema = self.metadata.schemas.get(&schema_id).ok_or_else(|| {
             Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Cannot set current schema to unknown schema with id: 
'{}'",
-                    schema_id
-                ),
+                format!("Cannot set current schema to unknown schema with id: 
'{schema_id}'"),
             )
         })?;
 
@@ -706,9 +703,8 @@ impl TableMetadataBuilder {
                 return Err(Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "Cannot add schema field '{}' because it conflicts 
with existing partition field name. \
-                         Schema evolution cannot introduce field names that 
match existing partition field names.",
-                        field_name
+                        "Cannot add schema field '{field_name}' because it 
conflicts with existing partition field name. \
+                         Schema evolution cannot introduce field names that 
match existing partition field names."
                     ),
                 ));
             }
@@ -1209,8 +1205,7 @@ impl TableMetadataBuilder {
                            Error::new(
                                ErrorKind::Unexpected,
                                format!(
-                                   "Cannot find source column with name {} for 
sort column in re-assigned schema.",
-                                   source_field_name
+                                   "Cannot find source column with name 
{source_field_name} for sort column in re-assigned schema."
                                ),
                            )
                        })?.id;
diff --git a/crates/iceberg/src/spec/table_properties.rs 
b/crates/iceberg/src/spec/table_properties.rs
index 9aa789fed..497545601 100644
--- a/crates/iceberg/src/spec/table_properties.rs
+++ b/crates/iceberg/src/spec/table_properties.rs
@@ -30,7 +30,7 @@ where
     properties.get(key).map_or(Ok(default), |value| {
         value
             .parse::<T>()
-            .map_err(|e| anyhow::anyhow!("Invalid value for {}: {}", key, e))
+            .map_err(|e| anyhow::anyhow!("Invalid value for {key}: {e}"))
     })
 }
 
diff --git a/crates/iceberg/src/spec/transform.rs 
b/crates/iceberg/src/spec/transform.rs
index d69d15c28..6068716ef 100644
--- a/crates/iceberg/src/spec/transform.rs
+++ b/crates/iceberg/src/spec/transform.rs
@@ -894,7 +894,7 @@ impl Transform {
         ) {
             return Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("Expected a numeric literal, got: {:?}", boundary),
+                format!("Expected a numeric literal, got: {boundary:?}"),
             ));
         }
 
diff --git a/crates/iceberg/src/spec/values.rs 
b/crates/iceberg/src/spec/values.rs
index 8d56604b1..d06e754dc 100644
--- a/crates/iceberg/src/spec/values.rs
+++ b/crates/iceberg/src/spec/values.rs
@@ -351,11 +351,11 @@ impl PartialOrd for Datum {
 impl Display for Datum {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match (&self.r#type, &self.literal) {
-            (_, PrimitiveLiteral::Boolean(val)) => write!(f, "{}", val),
-            (PrimitiveType::Int, PrimitiveLiteral::Int(val)) => write!(f, 
"{}", val),
-            (PrimitiveType::Long, PrimitiveLiteral::Long(val)) => write!(f, 
"{}", val),
-            (_, PrimitiveLiteral::Float(val)) => write!(f, "{}", val),
-            (_, PrimitiveLiteral::Double(val)) => write!(f, "{}", val),
+            (_, PrimitiveLiteral::Boolean(val)) => write!(f, "{val}"),
+            (PrimitiveType::Int, PrimitiveLiteral::Int(val)) => write!(f, 
"{val}"),
+            (PrimitiveType::Long, PrimitiveLiteral::Long(val)) => write!(f, 
"{val}"),
+            (_, PrimitiveLiteral::Float(val)) => write!(f, "{val}"),
+            (_, PrimitiveLiteral::Double(val)) => write!(f, "{val}"),
             (PrimitiveType::Date, PrimitiveLiteral::Int(val)) => {
                 write!(f, "{}", days_to_date(*val))
             }
@@ -374,7 +374,7 @@ impl Display for Datum {
             (PrimitiveType::TimestamptzNs, PrimitiveLiteral::Long(val)) => {
                 write!(f, "{}", nanoseconds_to_datetimetz(*val))
             }
-            (_, PrimitiveLiteral::String(val)) => write!(f, r#""{}""#, val),
+            (_, PrimitiveLiteral::String(val)) => write!(f, r#""{val}""#),
             (PrimitiveType::Uuid, PrimitiveLiteral::UInt128(val)) => {
                 write!(f, "{}", Uuid::from_u128(*val))
             }
@@ -398,7 +398,7 @@ impl Display for Datum {
 fn display_bytes(bytes: &[u8], f: &mut Formatter<'_>) -> std::fmt::Result {
     let mut s = String::with_capacity(bytes.len() * 2);
     for b in bytes {
-        s.push_str(&format!("{:02X}", b));
+        s.push_str(&format!("{b:02X}"));
     }
     f.write_str(&s)
 }
@@ -482,7 +482,7 @@ impl Datum {
                 
PrimitiveLiteral::Int128(unscaled_value.to_i128().ok_or_else(|| {
                     Error::new(
                         ErrorKind::DataInvalid,
-                        format!("Can't convert bytes to i128: {:?}", bytes),
+                        format!("Can't convert bytes to i128: {bytes:?}"),
                     )
                 })?)
             }
@@ -526,8 +526,7 @@ impl Datum {
                     return Err(Error::new(
                         ErrorKind::DataInvalid,
                         format!(
-                            "PrimitiveType Decimal must has valid precision 
but got {}",
-                            precision
+                            "PrimitiveType Decimal must has valid precision 
but got {precision}"
                         ),
                     ));
                 };
@@ -1134,10 +1133,7 @@ impl Datum {
         if actual_bytes.len() > available_bytes {
             return Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Decimal value {} is too large for precision {}",
-                    decimal, precision
-                ),
+                format!("Decimal value {decimal} is too large for precision 
{precision}"),
             ));
         }
 
@@ -1933,10 +1929,7 @@ impl Literal {
                 (_, JsonValue::Null) => Ok(None),
                 (i, j) => Err(Error::new(
                     crate::ErrorKind::DataInvalid,
-                    format!(
-                        "The json value {} doesn't fit to the iceberg type 
{}.",
-                        j, i
-                    ),
+                    format!("The json value {j} doesn't fit to the iceberg 
type {i}."),
                 )),
             },
             Type::Struct(schema) => {
@@ -2081,7 +2074,7 @@ impl Literal {
                 (_, PrimitiveLiteral::Binary(val)) => 
Ok(JsonValue::String(val.iter().fold(
                     String::new(),
                     |mut acc, x| {
-                        acc.push_str(&format!("{:x}", x));
+                        acc.push_str(&format!("{x:x}"));
                         acc
                     },
                 ))),
@@ -2139,10 +2132,7 @@ impl Literal {
             }
             (value, r#type) => Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "The iceberg value {:?} doesn't fit to the iceberg type 
{}.",
-                    value, r#type
-                ),
+                format!("The iceberg value {value:?} doesn't fit to the 
iceberg type {type}."),
             )),
         }
     }
@@ -2515,7 +2505,7 @@ mod _serde {
                     } else {
                         return Err(Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Type {} should be a struct", ty),
+                            format!("Type {ty} should be a struct"),
                         ));
                     }
                     RawLiteralEnum::Record(Record { required, optional })
@@ -2538,7 +2528,7 @@ mod _serde {
                     } else {
                         return Err(Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Type {} should be a list", ty),
+                            format!("Type {ty} should be a list"),
                         ));
                     }
                 }
@@ -2606,7 +2596,7 @@ mod _serde {
                     } else {
                         return Err(Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Type {} should be a map", ty),
+                            format!("Type {ty} should be a map"),
                         ));
                     }
                 }
@@ -2619,8 +2609,7 @@ mod _serde {
                 Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "Unable to convert raw literal ({}) fail convert to 
type {} for: type mismatch",
-                        v, ty
+                        "Unable to convert raw literal ({v}) fail convert to 
type {ty} for: type mismatch"
                     ),
                 )
             };
@@ -2628,8 +2617,7 @@ mod _serde {
                 Error::new(
                     ErrorKind::DataInvalid,
                     format!(
-                        "Unable to convert raw literal ({}) fail convert to 
type {} for: {}",
-                        v, ty, reason
+                        "Unable to convert raw literal ({v}) fail convert to 
type {ty} for: {reason}"
                     ),
                 )
             };
@@ -3301,7 +3289,7 @@ mod tests {
                 Decimal::new(decimal_num, expect_scale),
                 expect_precision,
             );
-            assert!(result.is_err(), "expect error but got {:?}", result);
+            assert!(result.is_err(), "expect error but got {result:?}");
             assert_eq!(
                 result.unwrap_err().kind(),
                 ErrorKind::DataInvalid,
diff --git a/crates/iceberg/src/spec/view_metadata.rs 
b/crates/iceberg/src/spec/view_metadata.rs
index dafca4190..3139c0408 100644
--- a/crates/iceberg/src/spec/view_metadata.rs
+++ b/crates/iceberg/src/spec/view_metadata.rs
@@ -186,7 +186,7 @@ impl ViewMetadata {
         if !self.schemas.contains_key(&schema_id) {
             return Err(Error::new(
                 ErrorKind::DataInvalid,
-                format!("No schema exists with the schema id {}.", schema_id),
+                format!("No schema exists with the schema id {schema_id}."),
             ));
         }
         Ok(())
@@ -426,7 +426,7 @@ pub(crate) mod tests {
     }
 
     pub(crate) fn get_test_view_metadata(file_name: &str) -> ViewMetadata {
-        let path = format!("testdata/view_metadata/{}", file_name);
+        let path = format!("testdata/view_metadata/{file_name}");
         let metadata: String = fs::read_to_string(path).unwrap();
 
         serde_json::from_str(&metadata).unwrap()
diff --git a/crates/iceberg/src/spec/view_metadata_builder.rs 
b/crates/iceberg/src/spec/view_metadata_builder.rs
index dc5c10402..9f542a7c6 100644
--- a/crates/iceberg/src/spec/view_metadata_builder.rs
+++ b/crates/iceberg/src/spec/view_metadata_builder.rs
@@ -200,10 +200,7 @@ impl ViewMetadataBuilder {
         let version = self.metadata.versions.get(&version_id).ok_or_else(|| {
             Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Cannot set current version to unknown version with id: 
{}",
-                    version_id
-                ),
+                format!("Cannot set current version to unknown version with 
id: {version_id}"),
             )
         })?;
 
@@ -433,8 +430,7 @@ impl ViewMetadataBuilder {
             return Err(Error::new(
                 ErrorKind::DataInvalid,
                 format!(
-                    "{} must be positive but was {}",
-                    VIEW_PROPERTY_VERSION_HISTORY_SIZE, num_versions_to_keep
+                    "{VIEW_PROPERTY_VERSION_HISTORY_SIZE} must be positive but 
was {num_versions_to_keep}"
                 ),
             ));
         }
diff --git a/crates/iceberg/src/transaction/update_properties.rs 
b/crates/iceberg/src/transaction/update_properties.rs
index 825d6bec8..d85072555 100644
--- a/crates/iceberg/src/transaction/update_properties.rs
+++ b/crates/iceberg/src/transaction/update_properties.rs
@@ -87,10 +87,7 @@ impl TransactionAction for UpdatePropertiesAction {
         {
             return Err(Error::new(
                 ErrorKind::PreconditionFailed,
-                format!(
-                    "Key {} is present in both removal set and update set",
-                    overlapping_key
-                ),
+                format!("Key {overlapping_key} is present in both removal set 
and update set"),
             ));
         }
 
diff --git a/crates/iceberg/src/transform/mod.rs 
b/crates/iceberg/src/transform/mod.rs
index 809d2dafe..f870a0872 100644
--- a/crates/iceberg/src/transform/mod.rs
+++ b/crates/iceberg/src/transform/mod.rs
@@ -44,7 +44,7 @@ pub trait TransformFunction: Send + Sync + Debug {
         self.transform_literal(input)?.ok_or_else(|| {
             Error::new(
                 ErrorKind::Unexpected,
-                format!("Returns 'None' for literal {}", input),
+                format!("Returns 'None' for literal {input}"),
             )
         })
     }
@@ -171,10 +171,7 @@ mod test {
                 assert_eq!(
                     satisfies_order_of,
                     &trans.satisfies_order_of(other_trans),
-                    "Failed to check satisfies order {}, {}, {}",
-                    trans,
-                    other_trans,
-                    satisfies_order_of
+                    "Failed to check satisfies order {trans}, {other_trans}, 
{satisfies_order_of}"
                 );
             }
 
@@ -182,8 +179,7 @@ mod test {
                 let actual = trans.result_type(input_type).ok();
                 assert_eq!(
                     result_type, &actual,
-                    "type mismatch at index {}, input: {}, expected: {:?}, 
actual: {:?}",
-                    i, input_type, result_type, actual
+                    "type mismatch at index {i}, input: {input_type}, 
expected: {result_type:?}, actual: {actual:?}"
                 );
             }
         }
diff --git a/crates/iceberg/src/transform/temporal.rs 
b/crates/iceberg/src/transform/temporal.rs
index 3b333d7ba..d0a0da249 100644
--- a/crates/iceberg/src/transform/temporal.rs
+++ b/crates/iceberg/src/transform/temporal.rs
@@ -230,10 +230,7 @@ impl Day {
         let delta = Duration::new(secs, nanos).ok_or_else(|| {
             Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Failed to create 'TimeDelta' from seconds {} and nanos 
{}",
-                    secs, nanos
-                ),
+                format!("Failed to create 'TimeDelta' from seconds {secs} and 
nanos {nanos}"),
             )
         })?;
 
@@ -259,10 +256,7 @@ impl Day {
         let delta = Duration::new(secs, nanos).ok_or_else(|| {
             Error::new(
                 ErrorKind::DataInvalid,
-                format!(
-                    "Failed to create 'TimeDelta' from seconds {} and nanos 
{}",
-                    secs, nanos
-                ),
+                format!("Failed to create 'TimeDelta' from seconds {secs} and 
nanos {nanos}"),
             )
         })?;
 
diff --git a/crates/iceberg/src/transform/truncate.rs 
b/crates/iceberg/src/transform/truncate.rs
index d712df518..84ef7c0da 100644
--- a/crates/iceberg/src/transform/truncate.rs
+++ b/crates/iceberg/src/transform/truncate.rs
@@ -322,7 +322,7 @@ mod test {
         fixture.assert_projection(
             &fixture.set_predicate(PredicateOperator::In, vec![
                 Datum::string(value),
-                Datum::string(format!("{}abc", value)),
+                Datum::string(format!("{value}abc")),
             ]),
             Some(r#"name IN ("abcde")"#),
         )?;
@@ -330,7 +330,7 @@ mod test {
         fixture.assert_projection(
             &fixture.set_predicate(PredicateOperator::NotIn, vec![
                 Datum::string(value),
-                Datum::string(format!("{}abc", value)),
+                Datum::string(format!("{value}abc")),
             ]),
             None,
         )?;
diff --git a/crates/iceberg/src/writer/base_writer/data_file_writer.rs 
b/crates/iceberg/src/writer/base_writer/data_file_writer.rs
index b9de4efda..dcaa56cc9 100644
--- a/crates/iceberg/src/writer/base_writer/data_file_writer.rs
+++ b/crates/iceberg/src/writer/base_writer/data_file_writer.rs
@@ -101,7 +101,7 @@ where
                     res.build().map_err(|e| {
                         Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Failed to build data file: {}", e),
+                            format!("Failed to build data file: {e}"),
                         )
                     })
                 })
diff --git a/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs 
b/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
index bd96c80bb..664ea8433 100644
--- a/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
+++ b/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
@@ -181,7 +181,7 @@ where
                     res.build().map_err(|e| {
                         Error::new(
                             ErrorKind::DataInvalid,
-                            format!("Failed to build data file: {}", e),
+                            format!("Failed to build data file: {e}"),
                         )
                     })
                 })
diff --git a/crates/iceberg/src/writer/file_writer/location_generator.rs 
b/crates/iceberg/src/writer/file_writer/location_generator.rs
index 4a73fc148..4cfc27844 100644
--- a/crates/iceberg/src/writer/file_writer/location_generator.rs
+++ b/crates/iceberg/src/writer/file_writer/location_generator.rs
@@ -63,7 +63,7 @@ impl DefaultLocationGenerator {
         let data_location = if let Some(data_location) = 
configured_data_location {
             data_location.clone()
         } else {
-            format!("{}{}", table_location, DEFAULT_DATA_DIR)
+            format!("{table_location}{DEFAULT_DATA_DIR}")
         };
         Ok(Self { data_location })
     }
@@ -114,7 +114,7 @@ impl DefaultFileNameGenerator {
     /// Create a new `FileNameGenerator`.
     pub fn new(prefix: String, suffix: Option<String>, format: DataFileFormat) 
-> Self {
         let suffix = if let Some(suffix) = suffix {
-            format!("-{}", suffix)
+            format!("-{suffix}")
         } else {
             "".to_string()
         };
diff --git a/crates/iceberg/src/writer/file_writer/parquet_writer.rs 
b/crates/iceberg/src/writer/file_writer/parquet_writer.rs
index 620f27df3..3e9d1715c 100644
--- a/crates/iceberg/src/writer/file_writer/parquet_writer.rs
+++ b/crates/iceberg/src/writer/file_writer/parquet_writer.rs
@@ -274,10 +274,7 @@ impl MinMaxColAggregator {
         let Type::Primitive(ty) = ty.clone() else {
             return Err(Error::new(
                 ErrorKind::Unexpected,
-                format!(
-                    "Composed type {} is not supported for min max 
aggregation.",
-                    ty
-                ),
+                format!("Composed type {ty} is not supported for min max 
aggregation."),
             ));
         };
 
@@ -285,7 +282,7 @@ impl MinMaxColAggregator {
             let Some(min_datum) = get_parquet_stat_min_as_datum(&ty, &value)? 
else {
                 return Err(Error::new(
                     ErrorKind::Unexpected,
-                    format!("Statistics {} is not match with field type {}.", 
value, ty),
+                    format!("Statistics {value} is not match with field type 
{ty}."),
                 ));
             };
 
@@ -296,7 +293,7 @@ impl MinMaxColAggregator {
             let Some(max_datum) = get_parquet_stat_max_as_datum(&ty, &value)? 
else {
                 return Err(Error::new(
                     ErrorKind::Unexpected,
-                    format!("Statistics {} is not match with field type {}.", 
value, ty),
+                    format!("Statistics {value} is not match with field type 
{ty}."),
                 ));
             };
 
@@ -333,7 +330,7 @@ impl ParquetWriter {
             let parquet_metadata = 
parquet_reader.get_metadata(None).await.map_err(|err| {
                 Error::new(
                     ErrorKind::DataInvalid,
-                    format!("Error reading Parquet metadata: {}", err),
+                    format!("Error reading Parquet metadata: {err}"),
                 )
             })?;
             let mut builder = ParquetWriter::parquet_to_data_file_builder(
diff --git a/crates/iceberg/src/writer/file_writer/rolling_writer.rs 
b/crates/iceberg/src/writer/file_writer/rolling_writer.rs
index b68010304..8f0365478 100644
--- a/crates/iceberg/src/writer/file_writer/rolling_writer.rs
+++ b/crates/iceberg/src/writer/file_writer/rolling_writer.rs
@@ -436,8 +436,7 @@ mod tests {
         let total_records: u64 = data_files.iter().map(|file| 
file.record_count).sum();
         assert_eq!(
             total_records, expected_rows as u64,
-            "Expected {} total records across all files",
-            expected_rows
+            "Expected {expected_rows} total records across all files"
         );
 
         Ok(())
diff --git a/crates/iceberg/src/writer/partitioning/clustered_writer.rs 
b/crates/iceberg/src/writer/partitioning/clustered_writer.rs
index c141b212f..358772396 100644
--- a/crates/iceberg/src/writer/partitioning/clustered_writer.rs
+++ b/crates/iceberg/src/writer/partitioning/clustered_writer.rs
@@ -101,8 +101,7 @@ where
             return Err(Error::new(
                 ErrorKind::Unexpected,
                 format!(
-                    "The input is not sorted! Cannot write to partition that 
was previously closed: {:?}",
-                    partition_key
+                    "The input is not sorted! Cannot write to partition that 
was previously closed: {partition_key:?}"
                 ),
             ));
         }
@@ -511,8 +510,7 @@ mod tests {
         let error = result.unwrap_err();
         assert!(
             error.to_string().contains("The input is not sorted"),
-            "Expected 'input is not sorted' error, got: {}",
-            error
+            "Expected 'input is not sorted' error, got: {error}"
         );
 
         Ok(())
diff --git a/crates/iceberg/tests/file_io_s3_test.rs 
b/crates/iceberg/tests/file_io_s3_test.rs
index b7c484de8..b04412832 100644
--- a/crates/iceberg/tests/file_io_s3_test.rs
+++ b/crates/iceberg/tests/file_io_s3_test.rs
@@ -60,7 +60,7 @@ mod tests {
 
         FileIOBuilder::new("s3")
             .with_props(vec![
-                (S3_ENDPOINT, format!("http://{}", minio_socket_addr)),
+                (S3_ENDPOINT, format!("http://{minio_socket_addr}")),
                 (S3_ACCESS_KEY_ID, "admin".to_string()),
                 (S3_SECRET_ACCESS_KEY, "password".to_string()),
                 (S3_REGION, "us-east-1".to_string()),
@@ -208,7 +208,7 @@ mod tests {
         let file_io_with_custom_creds = FileIOBuilder::new("s3")
             .with_extension(custom_loader)
             .with_props(vec![
-                (S3_ENDPOINT, format!("http://{}", minio_socket_addr)),
+                (S3_ENDPOINT, format!("http://{minio_socket_addr}")),
                 (S3_REGION, "us-east-1".to_string()),
             ])
             .build()
@@ -237,7 +237,7 @@ mod tests {
         let file_io_with_custom_creds = FileIOBuilder::new("s3")
             .with_extension(custom_loader)
             .with_props(vec![
-                (S3_ENDPOINT, format!("http://{}", minio_socket_addr)),
+                (S3_ENDPOINT, format!("http://{minio_socket_addr}")),
                 (S3_REGION, "us-east-1".to_string()),
             ])
             .build()
diff --git a/crates/integration_tests/src/lib.rs 
b/crates/integration_tests/src/lib.rs
index 422cd5586..44f6c3024 100644
--- a/crates/integration_tests/src/lib.rs
+++ b/crates/integration_tests/src/lib.rs
@@ -46,7 +46,7 @@ pub fn set_test_fixture(func: &str) -> TestFixture {
     let catalog_config = HashMap::from([
         (
             REST_CATALOG_PROP_URI.to_string(),
-            format!("http://{}:{}", rest_catalog_ip, REST_CATALOG_PORT),
+            format!("http://{rest_catalog_ip}:{REST_CATALOG_PORT}"),
         ),
         (
             S3_ENDPOINT.to_string(),
diff --git 
a/crates/integration_tests/tests/shared_tests/read_positional_deletes.rs 
b/crates/integration_tests/tests/shared_tests/read_positional_deletes.rs
index 764181007..318b06bf0 100644
--- a/crates/integration_tests/tests/shared_tests/read_positional_deletes.rs
+++ b/crates/integration_tests/tests/shared_tests/read_positional_deletes.rs
@@ -40,7 +40,7 @@ async fn test_read_table_with_positional_deletes() {
         .unwrap();
 
     let scan = table.scan().build().unwrap();
-    println!("{:?}", scan);
+    println!("{scan:?}");
 
     let plan: Vec<_> = scan
         .plan_files()
@@ -49,7 +49,7 @@ async fn test_read_table_with_positional_deletes() {
         .try_collect()
         .await
         .unwrap();
-    println!("{:?}", plan);
+    println!("{plan:?}");
 
     // Scan plan phase should include delete files in file plan
     // when with_delete_file_processing_enabled == true
diff --git a/crates/integrations/datafusion/src/error.rs 
b/crates/integrations/datafusion/src/error.rs
index 273d92fa6..5bc47013d 100644
--- a/crates/integrations/datafusion/src/error.rs
+++ b/crates/integrations/datafusion/src/error.rs
@@ -24,7 +24,7 @@ pub fn from_datafusion_error(error: 
datafusion::error::DataFusionError) -> Error
         ErrorKind::Unexpected,
         "Operation failed for hitting datafusion error".to_string(),
     )
-    .with_source(anyhow!("datafusion error: {:?}", error))
+    .with_source(anyhow!("datafusion error: {error:?}"))
 }
 /// Converts an iceberg error into a datafusion error.
 pub fn to_datafusion_error(error: Error) -> datafusion::error::DataFusionError 
{
diff --git a/crates/integrations/datafusion/src/physical_plan/commit.rs 
b/crates/integrations/datafusion/src/physical_plan/commit.rs
index 6bba89fce..f876908ae 100644
--- a/crates/integrations/datafusion/src/physical_plan/commit.rs
+++ b/crates/integrations/datafusion/src/physical_plan/commit.rs
@@ -176,8 +176,7 @@ impl ExecutionPlan for IcebergCommitExec {
         // IcebergCommitExec only has one partition (partition 0)
         if partition != 0 {
             return Err(DataFusionError::Internal(format!(
-                "IcebergCommitExec only has one partition, but got partition 
{}",
-                partition
+                "IcebergCommitExec only has one partition, but got partition 
{partition}"
             )));
         }
 
diff --git a/crates/integrations/datafusion/src/physical_plan/scan.rs 
b/crates/integrations/datafusion/src/physical_plan/scan.rs
index d4751a19c..be92e93d2 100644
--- a/crates/integrations/datafusion/src/physical_plan/scan.rs
+++ b/crates/integrations/datafusion/src/physical_plan/scan.rs
@@ -167,7 +167,7 @@ impl DisplayAs for IcebergTableScan {
                 .map_or(String::new(), |v| v.join(",")),
             self.predicates
                 .clone()
-                .map_or(String::from(""), |p| format!("{}", p))
+                .map_or(String::from(""), |p| format!("{p}"))
         )
     }
 }
diff --git a/crates/integrations/datafusion/src/physical_plan/write.rs 
b/crates/integrations/datafusion/src/physical_plan/write.rs
index e46019c54..b9d1f02d1 100644
--- a/crates/integrations/datafusion/src/physical_plan/write.rs
+++ b/crates/integrations/datafusion/src/physical_plan/write.rs
@@ -232,10 +232,7 @@ impl ExecutionPlan for IcebergWriteExec {
         if file_format != DataFileFormat::Parquet {
             return Err(to_datafusion_error(Error::new(
                 ErrorKind::FeatureUnsupported,
-                format!(
-                    "File format {} is not supported for insert_into yet!",
-                    file_format
-                ),
+                format!("File format {file_format} is not supported for 
insert_into yet!"),
             )));
         }
 
@@ -515,7 +512,7 @@ mod tests {
             .map_err(|e| {
                 Error::new(
                     ErrorKind::Unexpected,
-                    format!("Failed to create record batch: {}", e),
+                    format!("Failed to create record batch: {e}"),
                 )
             })?;
 
@@ -532,7 +529,7 @@ mod tests {
         let stream = write_exec.execute(0, task_ctx).map_err(|e| {
             Error::new(
                 ErrorKind::Unexpected,
-                format!("Failed to execute plan: {}", e),
+                format!("Failed to execute plan: {e}"),
             )
         })?;
 
@@ -541,7 +538,7 @@ mod tests {
         let mut stream = stream;
         while let Some(batch) = stream.next().await {
             results.push(batch.map_err(|e| {
-                Error::new(ErrorKind::Unexpected, format!("Failed to get 
batch: {}", e))
+                Error::new(ErrorKind::Unexpected, format!("Failed to get 
batch: {e}"))
             })?);
         }
 
diff --git a/crates/integrations/playground/src/main.rs 
b/crates/integrations/playground/src/main.rs
index 6f3de1bc1..c52220995 100644
--- a/crates/integrations/playground/src/main.rs
+++ b/crates/integrations/playground/src/main.rs
@@ -76,7 +76,7 @@ async fn main_inner() -> anyhow::Result<()> {
     let args = Args::parse();
 
     if !args.quiet {
-        println!("ICEBERG PLAYGROUND v{}", ICEBERG_PLAYGROUND_VERSION);
+        println!("ICEBERG PLAYGROUND v{ICEBERG_PLAYGROUND_VERSION}");
     }
 
     let session_config = 
SessionConfig::from_env()?.with_information_schema(true);
diff --git a/crates/sqllogictest/src/engine/mod.rs 
b/crates/sqllogictest/src/engine/mod.rs
index f8a405a7c..724359fbe 100644
--- a/crates/sqllogictest/src/engine/mod.rs
+++ b/crates/sqllogictest/src/engine/mod.rs
@@ -39,7 +39,7 @@ pub async fn load_engine_runner(
 ) -> Result<Box<dyn EngineRunner>> {
     match engine_type {
         TYPE_DATAFUSION => Ok(Box::new(DataFusionEngine::new(cfg).await?)),
-        _ => Err(anyhow::anyhow!("Unsupported engine type: {}", 
engine_type).into()),
+        _ => Err(anyhow::anyhow!("Unsupported engine type: 
{engine_type}").into()),
     }
 }
 

Reply via email to