This is an automated email from the ASF dual-hosted git repository.

tustvold pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git


The following commit(s) were added to refs/heads/main by this push:
     new e3679e225 Update to arrow `33.0.0` (#5241)
e3679e225 is described below

commit e3679e2254a075c27c31c0f950d760a88c2cd1c5
Author: Raphael Taylor-Davies <[email protected]>
AuthorDate: Mon Feb 20 17:27:08 2023 +0000

    Update to arrow `33.0.0` (#5241)
    
    * Update arrow 33
    
    * Fix test
    
    * Fix avro
    
    * Update pin
    
    * Format
    
    * Further fixes
    
    * Remove pin
---
 benchmarks/Cargo.toml                              |   4 +-
 datafusion-cli/Cargo.lock                          |  60 +++++------
 datafusion-cli/Cargo.toml                          |   4 +-
 datafusion-examples/Cargo.toml                     |   6 +-
 datafusion/common/Cargo.toml                       |   4 +-
 datafusion/core/Cargo.toml                         |   4 +-
 datafusion/core/src/datasource/file_format/avro.rs |   8 +-
 datafusion/core/src/execution/context.rs           |   2 +-
 datafusion/core/src/physical_optimizer/pruning.rs  |   2 +-
 .../core/src/physical_plan/aggregates/mod.rs       |  72 +++++++-------
 .../core/src/physical_plan/file_format/json.rs     |  16 +--
 .../src/physical_plan/joins/sort_merge_join.rs     |  14 +--
 datafusion/core/tests/dataframe.rs                 | 110 ++++++++++-----------
 datafusion/core/tests/dataframe_functions.rs       |   8 +-
 datafusion/core/tests/sql/aggregates.rs            |   8 +-
 datafusion/core/tests/sql/expr.rs                  |  60 +++++------
 datafusion/core/tests/sql/functions.rs             |  16 +--
 datafusion/core/tests/sql/group_by.rs              |  18 ++--
 datafusion/core/tests/sql/joins.rs                 | 106 ++++++++++----------
 datafusion/core/tests/sql/json.rs                  |   8 +-
 datafusion/core/tests/sql/select.rs                |  18 ++--
 datafusion/core/tests/sql/set_variable.rs          |  54 +++-------
 datafusion/core/tests/sql/udf.rs                   |   2 +-
 datafusion/core/tests/sql/wildcard.rs              |  76 +++++++-------
 datafusion/core/tests/sql/window.rs                |  16 +--
 datafusion/core/tests/user_defined_aggregates.rs   |  12 +--
 datafusion/expr/Cargo.toml                         |   2 +-
 datafusion/jit/Cargo.toml                          |   2 +-
 datafusion/optimizer/Cargo.toml                    |   2 +-
 datafusion/physical-expr/Cargo.toml                |   6 +-
 datafusion/proto/Cargo.toml                        |   2 +-
 datafusion/row/Cargo.toml                          |   2 +-
 datafusion/sql/Cargo.toml                          |   2 +-
 parquet-test-utils/Cargo.toml                      |   2 +-
 test-utils/Cargo.toml                              |   2 +-
 35 files changed, 349 insertions(+), 381 deletions(-)

diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml
index 02eeb00a3..b494db7ec 100644
--- a/benchmarks/Cargo.toml
+++ b/benchmarks/Cargo.toml
@@ -33,14 +33,14 @@ simd = ["datafusion/simd"]
 snmalloc = ["snmalloc-rs"]
 
 [dependencies]
-arrow = "32.0.0"
+arrow = "33.0.0"
 datafusion = { path = "../datafusion/core", version = "18.0.0", features = 
["scheduler"] }
 env_logger = "0.10"
 futures = "0.3"
 mimalloc = { version = "0.1", optional = true, default-features = false }
 num_cpus = "1.13.0"
 object_store = "0.5.4"
-parquet = "32.0.0"
+parquet = "33.0.0"
 parquet-test-utils = { path = "../parquet-test-utils/", version = "0.1.0" }
 rand = "0.8.4"
 serde = { version = "1.0.136", features = ["derive"] }
diff --git a/datafusion-cli/Cargo.lock b/datafusion-cli/Cargo.lock
index 955861be1..4b3f57d6d 100644
--- a/datafusion-cli/Cargo.lock
+++ b/datafusion-cli/Cargo.lock
@@ -68,9 +68,9 @@ checksum = 
"8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
 
 [[package]]
 name = "arrow"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87d948f553cf556656eb89265700258e1032d26fec9b7920cd20319336e06afd"
+checksum = "f3724c874f1517cf898cd1c3ad18ab5071edf893c48e73139ab1e16cf0f2affe"
 dependencies = [
  "ahash",
  "arrow-arith",
@@ -91,9 +91,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-arith"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf30d4ebc3df9dfd8bd26883aa30687d4ddcfd7b2443e62bd7c8fedf153b8e45"
+checksum = "e958823b8383ca14d0a2e973de478dd7674cd9f72837f8c41c132a0fda6a4e5e"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -106,9 +106,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-array"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fe66ec388d882a61fff3eb613b5266af133aa08a3318e5e493daf0f5c1696cb"
+checksum = "db670eab50e76654065b5aed930f4367101fcddcb2223802007d1e0b4d5a2579"
 dependencies = [
  "ahash",
  "arrow-buffer",
@@ -122,9 +122,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-buffer"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ef967dadbccd4586ec8d7aab27d7033ecb5dfae8a605c839613039eac227bda"
+checksum = "9f0e01c931882448c0407bd32311a624b9f099739e94e786af68adc97016b5f2"
 dependencies = [
  "half",
  "num",
@@ -132,9 +132,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-cast"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "491a7979ea9e76dc218f532896e2d245fde5235e2e6420ce80d27cf6395dda84"
+checksum = "4bf35d78836c93f80d9362f3ccb47ff5e2c5ecfc270ff42cdf1ef80334961d44"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -148,9 +148,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-csv"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b1d4fc91078dbe843c2c50d90f8119c96e8dfac2f78d30f7a8cb9397399c61d"
+checksum = "0a6aa7c2531d89d01fed8c469a9b1bf97132a0bdf70b4724fe4bbb4537a50880"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -167,9 +167,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-data"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee0c0e3c5d3b80be8f267f4b2af714c08cad630569be01a8379cfe27b4866495"
+checksum = "ea50db4d1e1e4c2da2bfdea7b6d2722eef64267d5ab680d815f7ae42428057f5"
 dependencies = [
  "arrow-buffer",
  "arrow-schema",
@@ -179,9 +179,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-ipc"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a3ca7eb8d23c83fe40805cbafec70a6a31df72de47355545ff34c850f715403"
+checksum = "a4042fe6585155d1ec28a8e4937ec901a3ca7a19a22b9f6cd3f551b935cd84f5"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -193,9 +193,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-json"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf65aff76d2e340d827d5cab14759e7dd90891a288347e2202e4ee28453d9bed"
+checksum = "7c907c4ab4f26970a3719dc06e78e8054a01d0c96da3664d23b941e201b33d2b"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -212,9 +212,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-ord"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "074a5a55c37ae4750af4811c8861c0378d8ab2ff6c262622ad24efae6e0b73b3"
+checksum = "e131b447242a32129efc7932f58ed8931b42f35d8701c1a08f9f524da13b1d3c"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -226,9 +226,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-row"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e064ac4e64960ebfbe35f218f5e7d9dc9803b59c2e56f611da28ce6d008f839e"
+checksum = "b591ef70d76f4ac28dd7666093295fece0e5f9298f49af51ea49c001e1635bb6"
 dependencies = [
  "ahash",
  "arrow-array",
@@ -241,15 +241,15 @@ dependencies = [
 
 [[package]]
 name = "arrow-schema"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ead3f373b9173af52f2fdefcb5a7dd89f453fbc40056f574a8aeb23382a4ef81"
+checksum = "eb327717d87eb94be5eff3b0cb8987f54059d343ee5235abf7f143c85f54cfc8"
 
 [[package]]
 name = "arrow-select"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "646b4f15b5a77c970059e748aeb1539705c68cd397ecf0f0264c4ef3737d35f3"
+checksum = "79d3c389d1cea86793934f31594f914c8547d82e91e3411d4833ad0aac3266a7"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -260,9 +260,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-string"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8b8bf150caaeca03f39f1a91069701387d93f7cfd256d27f423ac8496d99a51"
+checksum = "30ee67790496dd310ddbf5096870324431e89aa76453e010020ac29b1184d356"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -1773,9 +1773,9 @@ dependencies = [
 
 [[package]]
 name = "parquet"
-version = "32.0.0"
+version = "33.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23b3d4917209e17e1da5fb07d276da237a42465f0def2b8d5fa5ce0e85855b4c"
+checksum = "b1b076829801167d889795cd1957989055543430fa1469cb1f6e32b789bfc764"
 dependencies = [
  "ahash",
  "arrow-array",
diff --git a/datafusion-cli/Cargo.toml b/datafusion-cli/Cargo.toml
index 375e86b7b..d5cdc5e64 100644
--- a/datafusion-cli/Cargo.toml
+++ b/datafusion-cli/Cargo.toml
@@ -21,7 +21,7 @@ description = "Command Line Client for DataFusion query 
engine."
 version = "18.0.0"
 authors = ["Apache Arrow <[email protected]>"]
 edition = "2021"
-keywords = [ "arrow", "datafusion", "query", "sql" ]
+keywords = ["arrow", "datafusion", "query", "sql"]
 license = "Apache-2.0"
 homepage = "https://github.com/apache/arrow-datafusion"
 repository = "https://github.com/apache/arrow-datafusion"
@@ -29,7 +29,7 @@ rust-version = "1.62"
 readme = "README.md"
 
 [dependencies]
-arrow = "32.0.0"
+arrow = "33.0.0"
 async-trait = "0.1.41"
 clap = { version = "3", features = ["derive", "cargo"] }
 datafusion = { path = "../datafusion/core", version = "18.0.0" }
diff --git a/datafusion-examples/Cargo.toml b/datafusion-examples/Cargo.toml
index 4f50ca614..5bbf6cce5 100644
--- a/datafusion-examples/Cargo.toml
+++ b/datafusion-examples/Cargo.toml
@@ -34,9 +34,9 @@ path = "examples/avro_sql.rs"
 required-features = ["datafusion/avro"]
 
 [dev-dependencies]
-arrow = "32.0.0"
-arrow-flight = { version = "32.0.0", features = ["flight-sql-experimental"] }
-arrow-schema = "32.0.0"
+arrow = "33.0.0"
+arrow-flight = { version = "33.0.0", features = ["flight-sql-experimental"] }
+arrow-schema = "33.0.0"
 async-trait = "0.1.41"
 dashmap = "5.4"
 datafusion = { path = "../datafusion/core" }
diff --git a/datafusion/common/Cargo.toml b/datafusion/common/Cargo.toml
index 0dcae674f..7bb77dd67 100644
--- a/datafusion/common/Cargo.toml
+++ b/datafusion/common/Cargo.toml
@@ -40,11 +40,11 @@ pyarrow = ["pyo3", "arrow/pyarrow"]
 
 [dependencies]
 apache-avro = { version = "0.14", default-features = false, features = 
["snappy"], optional = true }
-arrow = { version = "32.0.0", default-features = false }
+arrow = { version = "33.0.0", default-features = false }
 chrono = { version = "0.4", default-features = false }
 cranelift-module = { version = "0.92.0", optional = true }
 num_cpus = "1.13.0"
 object_store = { version = "0.5.4", default-features = false, optional = true }
-parquet = { version = "32.0.0", default-features = false, optional = true }
+parquet = { version = "33.0.0", default-features = false, optional = true }
 pyo3 = { version = "0.18.0", optional = true }
 sqlparser = "0.30"
diff --git a/datafusion/core/Cargo.toml b/datafusion/core/Cargo.toml
index 15702c29c..53fcc3bee 100644
--- a/datafusion/core/Cargo.toml
+++ b/datafusion/core/Cargo.toml
@@ -60,7 +60,7 @@ unicode_expressions = 
["datafusion-physical-expr/regex_expressions", "datafusion
 [dependencies]
 ahash = { version = "0.8", default-features = false, features = 
["runtime-rng"] }
 apache-avro = { version = "0.14", optional = true }
-arrow = { version = "32.0.0", features = ["prettyprint"] }
+arrow = { version = "33.0.0", features = ["prettyprint"] }
 async-compression = { version = "0.3.14", features = ["bzip2", "gzip", "xz", 
"futures-io", "tokio"], optional = true }
 async-trait = "0.1.41"
 bytes = "1.1"
@@ -86,7 +86,7 @@ num-traits = { version = "0.2", optional = true }
 num_cpus = "1.13.0"
 object_store = "0.5.3"
 parking_lot = "0.12"
-parquet = { version = "32.0.0", features = ["arrow", "async"] }
+parquet = { version = "33.0.0", features = ["arrow", "async"] }
 paste = "^1.0"
 percent-encoding = "2.2.0"
 pin-project-lite = "^0.2.7"
diff --git a/datafusion/core/src/datasource/file_format/avro.rs 
b/datafusion/core/src/datasource/file_format/avro.rs
index 75649c2a3..1b6d2b3bc 100644
--- a/datafusion/core/src/datasource/file_format/avro.rs
+++ b/datafusion/core/src/datasource/file_format/avro.rs
@@ -179,13 +179,13 @@ mod tests {
             
"+----+----------+-------------+--------------+---------+------------+-----------+------------+------------------+------------+---------------------+",
             "| id | bool_col | tinyint_col | smallint_col | int_col | 
bigint_col | float_col | double_col | date_string_col  | string_col | 
timestamp_col       |",
             
"+----+----------+-------------+--------------+---------+------------+-----------+------------+------------------+------------+---------------------+",
-            "| 4  | true     | 0           | 0            | 0       | 0        
  | 0         | 0          | 30332f30312f3039 | 30         | 
2009-03-01T00:00:00 |",
+            "| 4  | true     | 0           | 0            | 0       | 0        
  | 0.0       | 0.0        | 30332f30312f3039 | 30         | 
2009-03-01T00:00:00 |",
             "| 5  | false    | 1           | 1            | 1       | 10       
  | 1.1       | 10.1       | 30332f30312f3039 | 31         | 
2009-03-01T00:01:00 |",
-            "| 6  | true     | 0           | 0            | 0       | 0        
  | 0         | 0          | 30342f30312f3039 | 30         | 
2009-04-01T00:00:00 |",
+            "| 6  | true     | 0           | 0            | 0       | 0        
  | 0.0       | 0.0        | 30342f30312f3039 | 30         | 
2009-04-01T00:00:00 |",
             "| 7  | false    | 1           | 1            | 1       | 10       
  | 1.1       | 10.1       | 30342f30312f3039 | 31         | 
2009-04-01T00:01:00 |",
-            "| 2  | true     | 0           | 0            | 0       | 0        
  | 0         | 0          | 30322f30312f3039 | 30         | 
2009-02-01T00:00:00 |",
+            "| 2  | true     | 0           | 0            | 0       | 0        
  | 0.0       | 0.0        | 30322f30312f3039 | 30         | 
2009-02-01T00:00:00 |",
             "| 3  | false    | 1           | 1            | 1       | 10       
  | 1.1       | 10.1       | 30322f30312f3039 | 31         | 
2009-02-01T00:01:00 |",
-            "| 0  | true     | 0           | 0            | 0       | 0        
  | 0         | 0          | 30312f30312f3039 | 30         | 
2009-01-01T00:00:00 |",
+            "| 0  | true     | 0           | 0            | 0       | 0        
  | 0.0       | 0.0        | 30312f30312f3039 | 30         | 
2009-01-01T00:00:00 |",
             "| 1  | false    | 1           | 1            | 1       | 10       
  | 1.1       | 10.1       | 30312f30312f3039 | 31         | 
2009-01-01T00:01:00 |",
             
"+----+----------+-------------+--------------+---------+------------+-----------+------------+------------------+------------+---------------------+",
         ];
diff --git a/datafusion/core/src/execution/context.rs 
b/datafusion/core/src/execution/context.rs
index 251eaf0ef..d057828de 100644
--- a/datafusion/core/src/execution/context.rs
+++ b/datafusion/core/src/execution/context.rs
@@ -2374,7 +2374,7 @@ mod tests {
             "+-------------+",
             "| MY_AVG(t.i) |",
             "+-------------+",
-            "| 1           |",
+            "| 1.0         |",
             "+-------------+",
         ];
         assert_batches_eq!(expected, &result);
diff --git a/datafusion/core/src/physical_optimizer/pruning.rs 
b/datafusion/core/src/physical_optimizer/pruning.rs
index f3d363017..c80527e66 100644
--- a/datafusion/core/src/physical_optimizer/pruning.rs
+++ b/datafusion/core/src/physical_optimizer/pruning.rs
@@ -1213,7 +1213,7 @@ mod tests {
             Field::new("s1_min", DataType::Utf8, true),
         )]);
 
-        // Note the statistics return binary (which can't be cast to string)
+        // Note the statistics return an invalid UTF-8 sequence which will be 
converted to null
         let statistics = OneContainerStats {
             min_values: Some(Arc::new(BinaryArray::from_slice([&[255u8] as 
&[u8]]))),
             max_values: None,
diff --git a/datafusion/core/src/physical_plan/aggregates/mod.rs 
b/datafusion/core/src/physical_plan/aggregates/mod.rs
index 8ca50fec8..90a19a3b6 100644
--- a/datafusion/core/src/physical_plan/aggregates/mod.rs
+++ b/datafusion/core/src/physical_plan/aggregates/mod.rs
@@ -811,22 +811,22 @@ mod tests {
             common::collect(partial_aggregate.execute(0, 
task_ctx.clone())?).await?;
 
         let expected = vec![
-            "+---+---+-----------------+",
-            "| a | b | COUNT(1)[count] |",
-            "+---+---+-----------------+",
-            "|   | 1 | 2               |",
-            "|   | 2 | 2               |",
-            "|   | 3 | 2               |",
-            "|   | 4 | 2               |",
-            "| 2 |   | 2               |",
-            "| 2 | 1 | 2               |",
-            "| 3 |   | 3               |",
-            "| 3 | 2 | 2               |",
-            "| 3 | 3 | 1               |",
-            "| 4 |   | 3               |",
-            "| 4 | 3 | 1               |",
-            "| 4 | 4 | 2               |",
-            "+---+---+-----------------+",
+            "+---+-----+-----------------+",
+            "| a | b   | COUNT(1)[count] |",
+            "+---+-----+-----------------+",
+            "|   | 1.0 | 2               |",
+            "|   | 2.0 | 2               |",
+            "|   | 3.0 | 2               |",
+            "|   | 4.0 | 2               |",
+            "| 2 |     | 2               |",
+            "| 2 | 1.0 | 2               |",
+            "| 3 |     | 3               |",
+            "| 3 | 2.0 | 2               |",
+            "| 3 | 3.0 | 1               |",
+            "| 4 |     | 3               |",
+            "| 4 | 3.0 | 1               |",
+            "| 4 | 4.0 | 2               |",
+            "+---+-----+-----------------+",
         ];
         assert_batches_sorted_eq!(expected, &result);
 
@@ -858,22 +858,22 @@ mod tests {
         assert_eq!(batch.num_rows(), 12);
 
         let expected = vec![
-            "+---+---+----------+",
-            "| a | b | COUNT(1) |",
-            "+---+---+----------+",
-            "|   | 1 | 2        |",
-            "|   | 2 | 2        |",
-            "|   | 3 | 2        |",
-            "|   | 4 | 2        |",
-            "| 2 |   | 2        |",
-            "| 2 | 1 | 2        |",
-            "| 3 |   | 3        |",
-            "| 3 | 2 | 2        |",
-            "| 3 | 3 | 1        |",
-            "| 4 |   | 3        |",
-            "| 4 | 3 | 1        |",
-            "| 4 | 4 | 2        |",
-            "+---+---+----------+",
+            "+---+-----+----------+",
+            "| a | b   | COUNT(1) |",
+            "+---+-----+----------+",
+            "|   | 1.0 | 2        |",
+            "|   | 2.0 | 2        |",
+            "|   | 3.0 | 2        |",
+            "|   | 4.0 | 2        |",
+            "| 2 |     | 2        |",
+            "| 2 | 1.0 | 2        |",
+            "| 3 |     | 3        |",
+            "| 3 | 2.0 | 2        |",
+            "| 3 | 3.0 | 1        |",
+            "| 4 |     | 3        |",
+            "| 4 | 3.0 | 1        |",
+            "| 4 | 4.0 | 2        |",
+            "+---+-----+----------+",
         ];
 
         assert_batches_sorted_eq!(&expected, &result);
@@ -919,9 +919,9 @@ mod tests {
             "+---+---------------+-------------+",
             "| a | AVG(b)[count] | AVG(b)[sum] |",
             "+---+---------------+-------------+",
-            "| 2 | 2             | 2           |",
-            "| 3 | 3             | 7           |",
-            "| 4 | 3             | 11          |",
+            "| 2 | 2             | 2.0         |",
+            "| 3 | 3             | 7.0         |",
+            "| 4 | 3             | 11.0        |",
             "+---+---------------+-------------+",
         ];
         assert_batches_sorted_eq!(expected, &result);
@@ -956,7 +956,7 @@ mod tests {
             "+---+--------------------+",
             "| a | AVG(b)             |",
             "+---+--------------------+",
-            "| 2 | 1                  |",
+            "| 2 | 1.0                |",
             "| 3 | 2.3333333333333335 |", // 3, (2 + 3 + 2) / 3
             "| 4 | 3.6666666666666665 |", // 4, (3 + 4 + 4) / 3
             "+---+--------------------+",
diff --git a/datafusion/core/src/physical_plan/file_format/json.rs 
b/datafusion/core/src/physical_plan/file_format/json.rs
index 6d26965ec..73cbe24d4 100644
--- a/datafusion/core/src/physical_plan/file_format/json.rs
+++ b/datafusion/core/src/physical_plan/file_format/json.rs
@@ -364,14 +364,14 @@ mod tests {
 
         assert_batches_eq!(
             &[
-                "+-----+----------------+---------------+------+",
-                "| a   | b              | c             | d    |",
-                "+-----+----------------+---------------+------+",
-                "| 1   | [2, 1.3, -6.1] | [false, true] | 4    |",
-                "| -10 | [2, 1.3, -6.1] | [true, true]  | 4    |",
-                "| 2   | [2, , -6.1]    | [false, ]     | text |",
-                "|     |                |               |      |",
-                "+-----+----------------+---------------+------+",
+                "+-----+------------------+---------------+------+",
+                "| a   | b                | c             | d    |",
+                "+-----+------------------+---------------+------+",
+                "| 1   | [2.0, 1.3, -6.1] | [false, true] | 4    |",
+                "| -10 | [2.0, 1.3, -6.1] | [true, true]  | 4    |",
+                "| 2   | [2.0, , -6.1]    | [false, ]     | text |",
+                "|     |                  |               |      |",
+                "+-----+------------------+---------------+------+",
             ],
             &results
         );
diff --git a/datafusion/core/src/physical_plan/joins/sort_merge_join.rs 
b/datafusion/core/src/physical_plan/joins/sort_merge_join.rs
index 88667b692..8fa514593 100644
--- a/datafusion/core/src/physical_plan/joins/sort_merge_join.rs
+++ b/datafusion/core/src/physical_plan/joins/sort_merge_join.rs
@@ -1983,13 +1983,13 @@ mod tests {
         let (_, batches) = join_collect(left, right, on, 
JoinType::Inner).await?;
 
         let expected = vec![
-            
"+------------+------------+------------+------------+------------+------------+",
-            "| a1         | b1         | c1         | a2         | b1         
| c2         |",
-            
"+------------+------------+------------+------------+------------+------------+",
-            "| 1970-01-01 | 2022-04-23 | 1970-01-01 | 1970-01-01 | 2022-04-23 
| 1970-01-01 |",
-            "| 1970-01-01 | 2022-04-25 | 1970-01-01 | 1970-01-01 | 2022-04-25 
| 1970-01-01 |",
-            "| 1970-01-01 | 2022-04-25 | 1970-01-01 | 1970-01-01 | 2022-04-25 
| 1970-01-01 |",
-            
"+------------+------------+------------+------------+------------+------------+",
+            
"+-------------------------+---------------------+-------------------------+-------------------------+---------------------+-------------------------+",
+            "| a1                      | b1                  | c1              
        | a2                      | b1                  | c2                    
  |",
+            
"+-------------------------+---------------------+-------------------------+-------------------------+---------------------+-------------------------+",
+            "| 1970-01-01T00:00:00.001 | 2022-04-23T08:44:01 | 
1970-01-01T00:00:00.007 | 1970-01-01T00:00:00.010 | 2022-04-23T08:44:01 | 
1970-01-01T00:00:00.070 |",
+            "| 1970-01-01T00:00:00.002 | 2022-04-25T16:17:21 | 
1970-01-01T00:00:00.008 | 1970-01-01T00:00:00.030 | 2022-04-25T16:17:21 | 
1970-01-01T00:00:00.090 |",
+            "| 1970-01-01T00:00:00.003 | 2022-04-25T16:17:21 | 
1970-01-01T00:00:00.009 | 1970-01-01T00:00:00.030 | 2022-04-25T16:17:21 | 
1970-01-01T00:00:00.090 |",
+            
"+-------------------------+---------------------+-------------------------+-------------------------+---------------------+-------------------------+",
         ];
         // The output order is important as SMJ preserves sortedness
         assert_batches_eq!(expected, &batches);
diff --git a/datafusion/core/tests/dataframe.rs 
b/datafusion/core/tests/dataframe.rs
index f10902839..f0183838d 100644
--- a/datafusion/core/tests/dataframe.rs
+++ b/datafusion/core/tests/dataframe.rs
@@ -361,19 +361,19 @@ async fn test_grouping_set_array_agg_with_overflow() -> 
Result<()> {
         "|    | 2  | 184    | 8.363636363636363   |",
         "|    | 1  | 367    | 16.681818181818183  |",
         "| e  |    | 847    | 40.333333333333336  |",
-        "| e  | 5  | -22    | -11                 |",
+        "| e  | 5  | -22    | -11.0               |",
         "| e  | 4  | 261    | 37.285714285714285  |",
-        "| e  | 3  | 192    | 48                  |",
+        "| e  | 3  | 192    | 48.0                |",
         "| e  | 2  | 189    | 37.8                |",
         "| e  | 1  | 227    | 75.66666666666667   |",
         "| d  |    | 458    | 25.444444444444443  |",
         "| d  | 5  | -99    | -49.5               |",
-        "| d  | 4  | 162    | 54                  |",
+        "| d  | 4  | 162    | 54.0                |",
         "| d  | 3  | 124    | 41.333333333333336  |",
         "| d  | 2  | 328    | 109.33333333333333  |",
         "| d  | 1  | -57    | -8.142857142857142  |",
         "| c  |    | -28    | -1.3333333333333333 |",
-        "| c  | 5  | 24     | 12                  |",
+        "| c  | 5  | 24     | 12.0                |",
         "| c  | 4  | -43    | -10.75              |",
         "| c  | 3  | 190    | 47.5                |",
         "| c  | 2  | -389   | -55.57142857142857  |",
@@ -381,12 +381,12 @@ async fn test_grouping_set_array_agg_with_overflow() -> 
Result<()> {
         "| b  |    | -111   | -5.842105263157895  |",
         "| b  | 5  | -1     | -0.2                |",
         "| b  | 4  | -223   | -44.6               |",
-        "| b  | 3  | -84    | -42                 |",
+        "| b  | 3  | -84    | -42.0               |",
         "| b  | 2  | 102    | 25.5                |",
         "| b  | 1  | 95     | 31.666666666666668  |",
         "| a  |    | -385   | -18.333333333333332 |",
-        "| a  | 5  | -96    | -32                 |",
-        "| a  | 4  | -128   | -32                 |",
+        "| a  | 5  | -96    | -32.0               |",
+        "| a  | 4  | -128   | -32.0               |",
         "| a  | 3  | -27    | -4.5                |",
         "| a  | 2  | -46    | -15.333333333333334 |",
         "| a  | 1  | -88    | -17.6               |",
@@ -548,14 +548,14 @@ async fn unnest_columns() -> Result<()> {
     let df = table_with_nested_types(NUM_ROWS).await?;
     let results = df.collect().await?;
     let expected = vec![
-        
r#"+----------+------------------------------------------------------------+--------------------+"#,
-        r#"| shape_id | points                                                 
    | tags               |"#,
-        
r#"+----------+------------------------------------------------------------+--------------------+"#,
-        r#"| 1        | [{"x": -3, "y": -4}, {"x": -3, "y": 6}, {"x": 2, "y": 
-2}] | [tag1]             |"#,
-        r#"| 2        |                                                        
    | [tag1, tag2]       |"#,
-        r#"| 3        | [{"x": -9, "y": 2}, {"x": -10, "y": -4}]               
    |                    |"#,
-        r#"| 4        | [{"x": -3, "y": 5}, {"x": 2, "y": -1}]                 
    | [tag1, tag2, tag3] |"#,
-        
r#"+----------+------------------------------------------------------------+--------------------+"#,
+        
"+----------+------------------------------------------------+--------------------+",
+        "| shape_id | points                                         | tags    
           |",
+        
"+----------+------------------------------------------------+--------------------+",
+        "| 1        | [{x: -3, y: -4}, {x: -3, y: 6}, {x: 2, y: -2}] | [tag1]  
           |",
+        "| 2        |                                                | [tag1, 
tag2]       |",
+        "| 3        | [{x: -9, y: 2}, {x: -10, y: -4}]               |         
           |",
+        "| 4        | [{x: -3, y: 5}, {x: 2, y: -1}]                 | [tag1, 
tag2, tag3] |",
+        
"+----------+------------------------------------------------+--------------------+",
     ];
     assert_batches_sorted_eq!(expected, &results);
 
@@ -563,17 +563,17 @@ async fn unnest_columns() -> Result<()> {
     let df = table_with_nested_types(NUM_ROWS).await?;
     let results = df.unnest_column("tags")?.collect().await?;
     let expected = vec![
-        
r#"+----------+------------------------------------------------------------+------+"#,
-        r#"| shape_id | points                                                 
    | tags |"#,
-        
r#"+----------+------------------------------------------------------------+------+"#,
-        r#"| 1        | [{"x": -3, "y": -4}, {"x": -3, "y": 6}, {"x": 2, "y": 
-2}] | tag1 |"#,
-        r#"| 2        |                                                        
    | tag1 |"#,
-        r#"| 2        |                                                        
    | tag2 |"#,
-        r#"| 3        | [{"x": -9, "y": 2}, {"x": -10, "y": -4}]               
    |      |"#,
-        r#"| 4        | [{"x": -3, "y": 5}, {"x": 2, "y": -1}]                 
    | tag1 |"#,
-        r#"| 4        | [{"x": -3, "y": 5}, {"x": 2, "y": -1}]                 
    | tag2 |"#,
-        r#"| 4        | [{"x": -3, "y": 5}, {"x": 2, "y": -1}]                 
    | tag3 |"#,
-        
r#"+----------+------------------------------------------------------------+------+"#,
+        "+----------+------------------------------------------------+------+",
+        "| shape_id | points                                         | tags |",
+        "+----------+------------------------------------------------+------+",
+        "| 1        | [{x: -3, y: -4}, {x: -3, y: 6}, {x: 2, y: -2}] | tag1 |",
+        "| 2        |                                                | tag1 |",
+        "| 2        |                                                | tag2 |",
+        "| 3        | [{x: -9, y: 2}, {x: -10, y: -4}]               |      |",
+        "| 4        | [{x: -3, y: 5}, {x: 2, y: -1}]                 | tag1 |",
+        "| 4        | [{x: -3, y: 5}, {x: 2, y: -1}]                 | tag2 |",
+        "| 4        | [{x: -3, y: 5}, {x: 2, y: -1}]                 | tag3 |",
+        "+----------+------------------------------------------------+------+",
     ];
     assert_batches_sorted_eq!(expected, &results);
 
@@ -586,18 +586,18 @@ async fn unnest_columns() -> Result<()> {
     let df = table_with_nested_types(NUM_ROWS).await?;
     let results = df.unnest_column("points")?.collect().await?;
     let expected = vec![
-        r#"+----------+---------------------+--------------------+"#,
-        r#"| shape_id | points              | tags               |"#,
-        r#"+----------+---------------------+--------------------+"#,
-        r#"| 1        | {"x": -3, "y": -4}  | [tag1]             |"#,
-        r#"| 1        | {"x": -3, "y": 6}   | [tag1]             |"#,
-        r#"| 1        | {"x": 2, "y": -2}   | [tag1]             |"#,
-        r#"| 2        |                     | [tag1, tag2]       |"#,
-        r#"| 3        | {"x": -9, "y": 2}   |                    |"#,
-        r#"| 3        | {"x": -10, "y": -4} |                    |"#,
-        r#"| 4        | {"x": -3, "y": 5}   | [tag1, tag2, tag3] |"#,
-        r#"| 4        | {"x": 2, "y": -1}   | [tag1, tag2, tag3] |"#,
-        r#"+----------+---------------------+--------------------+"#,
+        "+----------+-----------------+--------------------+",
+        "| shape_id | points          | tags               |",
+        "+----------+-----------------+--------------------+",
+        "| 1        | {x: -3, y: -4}  | [tag1]             |",
+        "| 1        | {x: -3, y: 6}   | [tag1]             |",
+        "| 1        | {x: 2, y: -2}   | [tag1]             |",
+        "| 2        |                 | [tag1, tag2]       |",
+        "| 3        | {x: -10, y: -4} |                    |",
+        "| 3        | {x: -9, y: 2}   |                    |",
+        "| 4        | {x: -3, y: 5}   | [tag1, tag2, tag3] |",
+        "| 4        | {x: 2, y: -1}   | [tag1, tag2, tag3] |",
+        "+----------+-----------------+--------------------+",
     ];
     assert_batches_sorted_eq!(expected, &results);
 
@@ -614,23 +614,23 @@ async fn unnest_columns() -> Result<()> {
         .collect()
         .await?;
     let expected = vec![
-        r#"+----------+---------------------+------+"#,
-        r#"| shape_id | points              | tags |"#,
-        r#"+----------+---------------------+------+"#,
-        r#"| 1        | {"x": -3, "y": -4}  | tag1 |"#,
-        r#"| 1        | {"x": -3, "y": 6}   | tag1 |"#,
-        r#"| 1        | {"x": 2, "y": -2}   | tag1 |"#,
-        r#"| 2        |                     | tag1 |"#,
-        r#"| 2        |                     | tag2 |"#,
-        r#"| 3        | {"x": -9, "y": 2}   |      |"#,
-        r#"| 3        | {"x": -10, "y": -4} |      |"#,
-        r#"| 4        | {"x": -3, "y": 5}   | tag1 |"#,
-        r#"| 4        | {"x": -3, "y": 5}   | tag2 |"#,
-        r#"| 4        | {"x": -3, "y": 5}   | tag3 |"#,
-        r#"| 4        | {"x": 2, "y": -1}   | tag1 |"#,
-        r#"| 4        | {"x": 2, "y": -1}   | tag2 |"#,
-        r#"| 4        | {"x": 2, "y": -1}   | tag3 |"#,
-        r#"+----------+---------------------+------+"#,
+        "+----------+-----------------+------+",
+        "| shape_id | points          | tags |",
+        "+----------+-----------------+------+",
+        "| 1        | {x: -3, y: -4}  | tag1 |",
+        "| 1        | {x: -3, y: 6}   | tag1 |",
+        "| 1        | {x: 2, y: -2}   | tag1 |",
+        "| 2        |                 | tag1 |",
+        "| 2        |                 | tag2 |",
+        "| 3        | {x: -10, y: -4} |      |",
+        "| 3        | {x: -9, y: 2}   |      |",
+        "| 4        | {x: -3, y: 5}   | tag1 |",
+        "| 4        | {x: -3, y: 5}   | tag2 |",
+        "| 4        | {x: -3, y: 5}   | tag3 |",
+        "| 4        | {x: 2, y: -1}   | tag1 |",
+        "| 4        | {x: 2, y: -1}   | tag2 |",
+        "| 4        | {x: 2, y: -1}   | tag3 |",
+        "+----------+-----------------+------+",
     ];
     assert_batches_sorted_eq!(expected, &results);
 
diff --git a/datafusion/core/tests/dataframe_functions.rs 
b/datafusion/core/tests/dataframe_functions.rs
index c6291dc36..8d280692e 100644
--- a/datafusion/core/tests/dataframe_functions.rs
+++ b/datafusion/core/tests/dataframe_functions.rs
@@ -662,10 +662,10 @@ async fn test_cast() -> Result<()> {
         "+--------+",
         "| test.b |",
         "+--------+",
-        "| 1      |",
-        "| 10     |",
-        "| 10     |",
-        "| 100    |",
+        "| 1.0    |",
+        "| 10.0   |",
+        "| 10.0   |",
+        "| 100.0  |",
         "+--------+",
     ];
 
diff --git a/datafusion/core/tests/sql/aggregates.rs 
b/datafusion/core/tests/sql/aggregates.rs
index cea7edf74..3eaa2fd5d 100644
--- a/datafusion/core/tests/sql/aggregates.rs
+++ b/datafusion/core/tests/sql/aggregates.rs
@@ -962,11 +962,11 @@ async fn test_accumulator_row_accumulator() -> Result<()> 
{
         
"+----+----+--------------------------------+-----------+--------------------------------+------------+--------------------+--------------------------------+------+--------------+",
         "| c1 | c2 | min1                           | min2      | max1         
                  | max2       | avg1               | min3                      
     | cnt1 | sum1         |",
         
"+----+----+--------------------------------+-----------+--------------------------------+------------+--------------------+--------------------------------+------+--------------+",
-        "| a  | 1  | 0keZ5G8BffGwgF2RwQD59TFzMStxCB | 774637006 | 
waIGbOGl1PM6gnzZ4uuZt4E2yDWRHs | 4015442341 | 2437927011         | 
0keZ5G8BffGwgF2RwQD59TFzMStxCB | 5    | 6094771121.5 |",
-        "| a  | 2  | b3b9esRhTzFEawbs6XhpKnD9ojutHB | 145294611 | 
ukyD7b0Efj7tNlFSRmzZ0IqkEzg2a8 | 3717551163 | 2267588664         | 
b3b9esRhTzFEawbs6XhpKnD9ojutHB | 3    | 3401364777   |",
+        "| a  | 1  | 0keZ5G8BffGwgF2RwQD59TFzMStxCB | 774637006 | 
waIGbOGl1PM6gnzZ4uuZt4E2yDWRHs | 4015442341 | 2437927011.0       | 
0keZ5G8BffGwgF2RwQD59TFzMStxCB | 5    | 6094771121.5 |",
+        "| a  | 2  | b3b9esRhTzFEawbs6XhpKnD9ojutHB | 145294611 | 
ukyD7b0Efj7tNlFSRmzZ0IqkEzg2a8 | 3717551163 | 2267588664.0       | 
b3b9esRhTzFEawbs6XhpKnD9ojutHB | 3    | 3401364777.0 |",
         "| a  | 3  | Amn2K87Db5Es3dFQO9cw9cvpAM6h35 | 431948861 | 
oLZ21P2JEDooxV1pU31cIxQHEeeoLu | 3998790955 | 2225685115.1666665 | 
Amn2K87Db5Es3dFQO9cw9cvpAM6h35 | 6    | 6676994872.5 |",
-        "| a  | 4  | KJFcmTVjdkCMv94wYCtfHMFhzyRsmH | 466439833 | 
ydkwycaISlYSlEq3TlkS2m15I2pcp8 | 2502326480 | 1655431654         | 
KJFcmTVjdkCMv94wYCtfHMFhzyRsmH | 4    | 3310812222.5 |",
-        "| a  | 5  | MeSTAXq8gVxVjbEjgkvU9YLte0X9uE | 141047417 | 
QJYm7YRA3YetcBHI5wkMZeLXVmfuNy | 2496054700 | 1216992989.6666667 | 
MeSTAXq8gVxVjbEjgkvU9YLte0X9uE | 3    | 1825431770   |",
+        "| a  | 4  | KJFcmTVjdkCMv94wYCtfHMFhzyRsmH | 466439833 | 
ydkwycaISlYSlEq3TlkS2m15I2pcp8 | 2502326480 | 1655431654.0       | 
KJFcmTVjdkCMv94wYCtfHMFhzyRsmH | 4    | 3310812222.5 |",
+        "| a  | 5  | MeSTAXq8gVxVjbEjgkvU9YLte0X9uE | 141047417 | 
QJYm7YRA3YetcBHI5wkMZeLXVmfuNy | 2496054700 | 1216992989.6666667 | 
MeSTAXq8gVxVjbEjgkvU9YLte0X9uE | 3    | 1825431770.0 |",
         
"+----+----+--------------------------------+-----------+--------------------------------+------------+--------------------+--------------------------------+------+--------------+",
     ];
     assert_batches_eq!(expected, &actual);
diff --git a/datafusion/core/tests/sql/expr.rs 
b/datafusion/core/tests/sql/expr.rs
index 01fd26723..24017f9cd 100644
--- a/datafusion/core/tests/sql/expr.rs
+++ b/datafusion/core/tests/sql/expr.rs
@@ -514,7 +514,7 @@ async fn query_without_from() -> Result<()> {
         "+---------------------+---------------------+---------------+",
         "| Int64(1) + Int64(2) | Int64(3) / Int64(4) | cos(Int64(0)) |",
         "+---------------------+---------------------+---------------+",
-        "| 3                   | 0                   | 1             |",
+        "| 3                   | 0                   | 1.0           |",
         "+---------------------+---------------------+---------------+",
     ];
     assert_batches_eq!(expected, &actual);
@@ -818,18 +818,12 @@ async fn test_array_literals() -> Result<()> {
 
 #[tokio::test]
 async fn test_struct_literals() -> Result<()> {
-    test_expression!(
-        "STRUCT(1,2,3,4,5)",
-        "{\"c0\": 1, \"c1\": 2, \"c2\": 3, \"c3\": 4, \"c4\": 5}"
-    );
-    test_expression!("STRUCT(Null)", "{\"c0\": null}");
-    test_expression!("STRUCT(2)", "{\"c0\": 2}");
-    test_expression!("STRUCT('1',Null)", "{\"c0\": \"1\", \"c1\": null}");
-    test_expression!("STRUCT(true, false)", "{\"c0\": true, \"c1\": false}");
-    test_expression!(
-        "STRUCT('str1', 'str2')",
-        "{\"c0\": \"str1\", \"c1\": \"str2\"}"
-    );
+    test_expression!("STRUCT(1,2,3,4,5)", "{c0: 1, c1: 2, c2: 3, c3: 4, c4: 
5}");
+    test_expression!("STRUCT(Null)", "{c0: }");
+    test_expression!("STRUCT(2)", "{c0: 2}");
+    test_expression!("STRUCT('1',Null)", "{c0: 1, c1: }");
+    test_expression!("STRUCT(true, false)", "{c0: true, c1: false}");
+    test_expression!("STRUCT('str1', 'str2')", "{c0: str1, c1: str2}");
 
     Ok(())
 }
@@ -1236,53 +1230,53 @@ async fn in_list_array() -> Result<()> {
 
 #[tokio::test]
 async fn test_extract_date_part() -> Result<()> {
-    test_expression!("date_part('YEAR', CAST('2000-01-01' AS DATE))", "2000");
+    test_expression!("date_part('YEAR', CAST('2000-01-01' AS DATE))", 
"2000.0");
     test_expression!(
         "EXTRACT(year FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "2020"
+        "2020.0"
     );
-    test_expression!("date_part('QUARTER', CAST('2000-01-01' AS DATE))", "1");
+    test_expression!("date_part('QUARTER', CAST('2000-01-01' AS DATE))", 
"1.0");
     test_expression!(
         "EXTRACT(quarter FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "3"
+        "3.0"
     );
-    test_expression!("date_part('MONTH', CAST('2000-01-01' AS DATE))", "1");
+    test_expression!("date_part('MONTH', CAST('2000-01-01' AS DATE))", "1.0");
     test_expression!(
         "EXTRACT(month FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "9"
+        "9.0"
     );
-    test_expression!("date_part('WEEK', CAST('2003-01-01' AS DATE))", "1");
+    test_expression!("date_part('WEEK', CAST('2003-01-01' AS DATE))", "1.0");
     test_expression!(
         "EXTRACT(WEEK FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "37"
+        "37.0"
     );
-    test_expression!("date_part('DAY', CAST('2000-01-01' AS DATE))", "1");
+    test_expression!("date_part('DAY', CAST('2000-01-01' AS DATE))", "1.0");
     test_expression!(
         "EXTRACT(day FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "8"
+        "8.0"
     );
-    test_expression!("date_part('DOY', CAST('2000-01-01' AS DATE))", "1");
+    test_expression!("date_part('DOY', CAST('2000-01-01' AS DATE))", "1.0");
     test_expression!(
         "EXTRACT(doy FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "252"
+        "252.0"
     );
-    test_expression!("date_part('DOW', CAST('2000-01-01' AS DATE))", "6");
+    test_expression!("date_part('DOW', CAST('2000-01-01' AS DATE))", "6.0");
     test_expression!(
         "EXTRACT(dow FROM to_timestamp('2020-09-08T12:00:00+00:00'))",
-        "2"
+        "2.0"
     );
-    test_expression!("date_part('HOUR', CAST('2000-01-01' AS DATE))", "0");
+    test_expression!("date_part('HOUR', CAST('2000-01-01' AS DATE))", "0.0");
     test_expression!(
         "EXTRACT(hour FROM to_timestamp('2020-09-08T12:03:03+00:00'))",
-        "12"
+        "12.0"
     );
     test_expression!(
         "EXTRACT(minute FROM to_timestamp('2020-09-08T12:12:00+00:00'))",
-        "12"
+        "12.0"
     );
     test_expression!(
         "date_part('minute', to_timestamp('2020-09-08T12:12:00+00:00'))",
-        "12"
+        "12.0"
     );
     test_expression!(
         "EXTRACT(second FROM 
to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
@@ -1298,7 +1292,7 @@ async fn test_extract_date_part() -> Result<()> {
     );
     test_expression!(
         "EXTRACT(nanosecond FROM 
to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
-        "12123456780"
+        "1.212345678e10"
     );
     test_expression!(
         "date_part('second', 
to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
@@ -1314,7 +1308,7 @@ async fn test_extract_date_part() -> Result<()> {
     );
     test_expression!(
         "date_part('nanosecond', 
to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
-        "12123456780"
+        "1.212345678e10"
     );
     Ok(())
 }
diff --git a/datafusion/core/tests/sql/functions.rs 
b/datafusion/core/tests/sql/functions.rs
index 615871076..ab3cc82b5 100644
--- a/datafusion/core/tests/sql/functions.rs
+++ b/datafusion/core/tests/sql/functions.rs
@@ -67,8 +67,8 @@ async fn csv_query_cast_literal() -> Result<()> {
         "+--------------------+----------+",
         "| c12                | Int64(1) |",
         "+--------------------+----------+",
-        "| 0.9294097332465232 | 1        |",
-        "| 0.3114712539863804 | 1        |",
+        "| 0.9294097332465232 | 1.0      |",
+        "| 0.3114712539863804 | 1.0      |",
         "+--------------------+----------+",
     ];
 
@@ -344,7 +344,7 @@ async fn case_sensitive_identifiers_functions() {
         "+-----------+",
         "| sqrt(t.i) |",
         "+-----------+",
-        "| 1         |",
+        "| 1.0       |",
         "+-----------+",
     ];
 
@@ -431,7 +431,7 @@ async fn case_builtin_math_expression() {
             "+-----------+",
             "| sqrt(t.v) |",
             "+-----------+",
-            "| 1         |",
+            "| 1.0       |",
             "+-----------+",
         ];
         let results = plan_and_collect(&ctx, "SELECT sqrt(v) FROM t")
@@ -499,10 +499,10 @@ async fn test_power() -> Result<()> {
         
"+-----------+-----------+-----------+-----------+------------------+--------------------+",
         "| power_i32 | power_i64 | power_f32 | power_f64 | power_int_scalar | 
power_float_scalar |",
         
"+-----------+-----------+-----------+-----------+------------------+--------------------+",
-        "| 8         | 8         | 1         | 1         | 8                | 
15.625             |",
-        "| 125       | 125       | 15.625    | 15.625    | 8                | 
15.625             |",
-        "| 0         | 0         | 0         | 0         | 8                | 
15.625             |",
-        "| -2744     | -2744     | -3048.625 | -3048.625 | 8                | 
15.625             |",
+        "| 8         | 8.0       | 1.0       | 1.0       | 8                | 
15.625             |",
+        "| 125       | 125.0     | 15.625    | 15.625    | 8                | 
15.625             |",
+        "| 0         | 0.0       | 0.0       | 0.0       | 8                | 
15.625             |",
+        "| -2744     | -2744.0   | -3048.625 | -3048.625 | 8                | 
15.625             |",
         "|           |           |           |           | 8                | 
15.625             |",
         
"+-----------+-----------+-----------+-----------+------------------+--------------------+",
     ];
diff --git a/datafusion/core/tests/sql/group_by.rs 
b/datafusion/core/tests/sql/group_by.rs
index 5fb106d44..a92eaf0f4 100644
--- a/datafusion/core/tests/sql/group_by.rs
+++ b/datafusion/core/tests/sql/group_by.rs
@@ -73,15 +73,15 @@ async fn csv_query_group_by_float64() -> Result<()> {
     let actual = execute_to_batches(&ctx, sql).await;
 
     let expected = vec![
-        "+-----+----------------+",
-        "| cnt | c2             |",
-        "+-----+----------------+",
-        "| 5   | 0.000000000005 |",
-        "| 4   | 0.000000000004 |",
-        "| 3   | 0.000000000003 |",
-        "| 2   | 0.000000000002 |",
-        "| 1   | 0.000000000001 |",
-        "+-----+----------------+",
+        "+-----+---------+",
+        "| cnt | c2      |",
+        "+-----+---------+",
+        "| 5   | 5.0e-12 |",
+        "| 4   | 4.0e-12 |",
+        "| 3   | 3.0e-12 |",
+        "| 2   | 2.0e-12 |",
+        "| 1   | 1.0e-12 |",
+        "+-----+---------+",
     ];
     assert_batches_eq!(expected, &actual);
 
diff --git a/datafusion/core/tests/sql/joins.rs 
b/datafusion/core/tests/sql/joins.rs
index 6d1b1e91b..a32d959f5 100644
--- a/datafusion/core/tests/sql/joins.rs
+++ b/datafusion/core/tests/sql/joins.rs
@@ -1367,12 +1367,12 @@ async fn hash_join_with_date32() -> Result<()> {
     );
 
     let expected = vec![
-        
"+------------+------------+---------+-----+------------+------------+---------+-----+",
-        "| c1         | c2         | c3      | c4  | c1         | c2         | 
c3      | c4  |",
-        
"+------------+------------+---------+-----+------------+------------+---------+-----+",
-        "| 1970-01-02 | 1970-01-02 | 1.23    | abc | 1970-01-02 | 1970-01-02 | 
-123.12 | abc |",
-        "| 1970-01-04 |            | -123.12 | jkl | 1970-01-04 |            | 
789.00  |     |",
-        
"+------------+------------+---------+-----+------------+------------+---------+-----+",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+-----+",
+        "| c1         | c2                  | c3      | c4  | c1         | c2  
                | c3      | c4  |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+-----+",
+        "| 1970-01-02 | 1970-01-02T00:00:00 | 1.23    | abc | 1970-01-02 | 
1970-01-02T00:00:00 | -123.12 | abc |",
+        "| 1970-01-04 |                     | -123.12 | jkl | 1970-01-04 |     
                | 789.00  |     |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+-----+",
     ];
 
     let results = execute_to_batches(&ctx, sql).await;
@@ -1405,14 +1405,14 @@ async fn hash_join_with_date64() -> Result<()> {
     );
 
     let expected = vec![
-        
"+------------+------------+---------+-----+------------+------------+---------+--------+",
-        "| c1         | c2         | c3      | c4  | c1         | c2         | 
c3      | c4     |",
-        
"+------------+------------+---------+-----+------------+------------+---------+--------+",
-        "|            | 1970-01-04 | 789.00  | ghi |            | 1970-01-04 | 
0.00    | qwerty |",
-        "| 1970-01-02 | 1970-01-02 | 1.23    | abc | 1970-01-02 | 1970-01-02 | 
-123.12 | abc    |",
-        "| 1970-01-03 | 1970-01-03 | 456.00  | def |            |            | 
        |        |",
-        "| 1970-01-04 |            | -123.12 | jkl |            |            | 
        |        |",
-        
"+------------+------------+---------+-----+------------+------------+---------+--------+",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+--------+",
+        "| c1         | c2                  | c3      | c4  | c1         | c2  
                | c3      | c4     |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+--------+",
+        "|            | 1970-01-04T00:00:00 | 789.00  | ghi |            | 
1970-01-04T00:00:00 | 0.00    | qwerty |",
+        "| 1970-01-02 | 1970-01-02T00:00:00 | 1.23    | abc | 1970-01-02 | 
1970-01-02T00:00:00 | -123.12 | abc    |",
+        "| 1970-01-03 | 1970-01-03T00:00:00 | 456.00  | def |            |     
                |         |        |",
+        "| 1970-01-04 |                     | -123.12 | jkl |            |     
                |         |        |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+--------+",
     ];
 
     let results = execute_to_batches(&ctx, sql).await;
@@ -1445,14 +1445,14 @@ async fn hash_join_with_decimal() -> Result<()> {
     );
 
     let expected = vec![
-    
"+------------+------------+---------+-----+------------+------------+-----------+---------+",
-    "| c1         | c2         | c3      | c4  | c1         | c2         | c3  
      | c4      |",
-    
"+------------+------------+---------+-----+------------+------------+-----------+---------+",
-    "|            |            |         |     |            |            | 
100000.00 | abcdefg |",
-    "|            |            |         |     |            | 1970-01-04 | 
0.00      | qwerty  |",
-    "|            | 1970-01-04 | 789.00  | ghi | 1970-01-04 |            | 
789.00    |         |",
-    "| 1970-01-04 |            | -123.12 | jkl | 1970-01-02 | 1970-01-02 | 
-123.12   | abc     |",
-    
"+------------+------------+---------+-----+------------+------------+-----------+---------+",
+        
"+------------+---------------------+---------+-----+------------+---------------------+-----------+---------+",
+        "| c1         | c2                  | c3      | c4  | c1         | c2  
                | c3        | c4      |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+-----------+---------+",
+        "|            |                     |         |     |            |     
                | 100000.00 | abcdefg |",
+        "|            |                     |         |     |            | 
1970-01-04T00:00:00 | 0.00      | qwerty  |",
+        "|            | 1970-01-04T00:00:00 | 789.00  | ghi | 1970-01-04 |     
                | 789.00    |         |",
+        "| 1970-01-04 |                     | -123.12 | jkl | 1970-01-02 | 
1970-01-02T00:00:00 | -123.12   | abc     |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+-----------+---------+",
     ];
 
     let results = execute_to_batches(&ctx, sql).await;
@@ -1485,11 +1485,11 @@ async fn hash_join_with_dictionary() -> Result<()> {
     );
 
     let expected = vec![
-        
"+------------+------------+------+-----+------------+------------+---------+-----+",
-        "| c1         | c2         | c3   | c4  | c1         | c2         | c3 
     | c4  |",
-        
"+------------+------------+------+-----+------------+------------+---------+-----+",
-        "| 1970-01-02 | 1970-01-02 | 1.23 | abc | 1970-01-02 | 1970-01-02 | 
-123.12 | abc |",
-        
"+------------+------------+------+-----+------------+------------+---------+-----+",
+        
"+------------+---------------------+------+-----+------------+---------------------+---------+-----+",
+        "| c1         | c2                  | c3   | c4  | c1         | c2     
             | c3      | c4  |",
+        
"+------------+---------------------+------+-----+------------+---------------------+---------+-----+",
+        "| 1970-01-02 | 1970-01-02T00:00:00 | 1.23 | abc | 1970-01-02 | 
1970-01-02T00:00:00 | -123.12 | abc |",
+        
"+------------+---------------------+------+-----+------------+---------------------+---------+-----+",
     ];
 
     let results = execute_to_batches(&ctx, sql).await;
@@ -1900,12 +1900,12 @@ async fn sort_merge_join_on_date32() -> Result<()> {
     );
 
     let expected = vec![
-        
"+------------+------------+---------+-----+------------+------------+---------+-----+",
-        "| c1         | c2         | c3      | c4  | c1         | c2         | 
c3      | c4  |",
-        
"+------------+------------+---------+-----+------------+------------+---------+-----+",
-        "| 1970-01-02 | 1970-01-02 | 1.23    | abc | 1970-01-02 | 1970-01-02 | 
-123.12 | abc |",
-        "| 1970-01-04 |            | -123.12 | jkl | 1970-01-04 |            | 
789.00  |     |",
-        
"+------------+------------+---------+-----+------------+------------+---------+-----+",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+-----+",
+        "| c1         | c2                  | c3      | c4  | c1         | c2  
                | c3      | c4  |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+-----+",
+        "| 1970-01-02 | 1970-01-02T00:00:00 | 1.23    | abc | 1970-01-02 | 
1970-01-02T00:00:00 | -123.12 | abc |",
+        "| 1970-01-04 |                     | -123.12 | jkl | 1970-01-04 |     
                | 789.00  |     |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+---------+-----+",
     ];
 
     let results = execute_to_batches(&ctx, sql).await;
@@ -1947,14 +1947,14 @@ async fn sort_merge_join_on_decimal() -> Result<()> {
     );
 
     let expected = vec![
-        
"+------------+------------+---------+-----+------------+------------+-----------+---------+",
-        "| c1         | c2         | c3      | c4  | c1         | c2         | 
c3        | c4      |",
-        
"+------------+------------+---------+-----+------------+------------+-----------+---------+",
-        "|            |            |         |     |            |            | 
100000.00 | abcdefg |",
-        "|            |            |         |     |            | 1970-01-04 | 
0.00      | qwerty  |",
-        "|            | 1970-01-04 | 789.00  | ghi | 1970-01-04 |            | 
789.00    |         |",
-        "| 1970-01-04 |            | -123.12 | jkl | 1970-01-02 | 1970-01-02 | 
-123.12   | abc     |",
-        
"+------------+------------+---------+-----+------------+------------+-----------+---------+",
+        
"+------------+---------------------+---------+-----+------------+---------------------+-----------+---------+",
+        "| c1         | c2                  | c3      | c4  | c1         | c2  
                | c3        | c4      |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+-----------+---------+",
+        "|            |                     |         |     |            |     
                | 100000.00 | abcdefg |",
+        "|            |                     |         |     |            | 
1970-01-04T00:00:00 | 0.00      | qwerty  |",
+        "|            | 1970-01-04T00:00:00 | 789.00  | ghi | 1970-01-04 |     
                | 789.00    |         |",
+        "| 1970-01-04 |                     | -123.12 | jkl | 1970-01-02 | 
1970-01-02T00:00:00 | -123.12   | abc     |",
+        
"+------------+---------------------+---------+-----+------------+---------------------+-----------+---------+",
     ];
 
     let results = execute_to_batches(&ctx, sql).await;
@@ -2980,18 +2980,18 @@ async fn 
test_cross_join_to_groupby_with_different_key_ordering() -> Result<()>
         "+------+------+----------+",
         "| col1 | col2 | sum_col3 |",
         "+------+------+----------+",
-        "| A    | 1    | 2        |",
-        "| A    | 2    | 2        |",
-        "| A    | 3    | 2        |",
-        "| A    | 4    | 2        |",
-        "| A    | 5    | 0        |",
-        "| A    | 6    | 0        |",
-        "| BB   | 1    | 0        |",
-        "| BB   | 2    | 0        |",
-        "| BB   | 3    | 0        |",
-        "| BB   | 4    | 0        |",
-        "| BB   | 5    | 2        |",
-        "| BB   | 6    | 2        |",
+        "| A    | 1    | 2.0      |",
+        "| A    | 2    | 2.0      |",
+        "| A    | 3    | 2.0      |",
+        "| A    | 4    | 2.0      |",
+        "| A    | 5    | 0.0      |",
+        "| A    | 6    | 0.0      |",
+        "| BB   | 1    | 0.0      |",
+        "| BB   | 2    | 0.0      |",
+        "| BB   | 3    | 0.0      |",
+        "| BB   | 4    | 0.0      |",
+        "| BB   | 5    | 2.0      |",
+        "| BB   | 6    | 2.0      |",
         "+------+------+----------+",
     ];
 
diff --git a/datafusion/core/tests/sql/json.rs 
b/datafusion/core/tests/sql/json.rs
index 26ddff61d..ad5a4c808 100644
--- a/datafusion/core/tests/sql/json.rs
+++ b/datafusion/core/tests/sql/json.rs
@@ -33,16 +33,16 @@ async fn json_query() {
         "+-----------------+------+",
         "| a               | b    |",
         "+-----------------+------+",
-        "| 1               | 2    |",
+        "| 1               | 2.0  |",
         "| -10             | -3.5 |",
         "| 2               | 0.6  |",
-        "| 1               | 2    |",
+        "| 1               | 2.0  |",
         "| 7               | -3.5 |",
         "| 1               | 0.6  |",
-        "| 1               | 2    |",
+        "| 1               | 2.0  |",
         "| 5               | -3.5 |",
         "| 1               | 0.6  |",
-        "| 1               | 2    |",
+        "| 1               | 2.0  |",
         "| 1               | -3.5 |",
         "| 100000000000000 | 0.6  |",
         "+-----------------+------+",
diff --git a/datafusion/core/tests/sql/select.rs 
b/datafusion/core/tests/sql/select.rs
index 124f25d36..00cc06bb4 100644
--- a/datafusion/core/tests/sql/select.rs
+++ b/datafusion/core/tests/sql/select.rs
@@ -278,15 +278,15 @@ async fn select_distinct_simple_2() {
     let actual = execute_to_batches(&ctx, sql).await;
 
     let expected = vec![
-        "+---------+----------------+",
-        "| c1      | c2             |",
-        "+---------+----------------+",
-        "| 0.00001 | 0.000000000001 |",
-        "| 0.00002 | 0.000000000002 |",
-        "| 0.00003 | 0.000000000003 |",
-        "| 0.00004 | 0.000000000004 |",
-        "| 0.00005 | 0.000000000005 |",
-        "+---------+----------------+",
+        "+---------+---------+",
+        "| c1      | c2      |",
+        "+---------+---------+",
+        "| 0.00001 | 1.0e-12 |",
+        "| 0.00002 | 2.0e-12 |",
+        "| 0.00003 | 3.0e-12 |",
+        "| 0.00004 | 4.0e-12 |",
+        "| 0.00005 | 5.0e-12 |",
+        "+---------+---------+",
     ];
     assert_batches_eq!(expected, &actual);
 }
diff --git a/datafusion/core/tests/sql/set_variable.rs 
b/datafusion/core/tests/sql/set_variable.rs
index 6a85f7df1..b7161eb2b 100644
--- a/datafusion/core/tests/sql/set_variable.rs
+++ b/datafusion/core/tests/sql/set_variable.rs
@@ -16,6 +16,7 @@
 // under the License.
 
 use super::*;
+use arrow::util::pretty::pretty_format_batches;
 
 #[tokio::test]
 async fn set_variable_to_value() {
@@ -412,14 +413,8 @@ async fn set_time_zone_bad_time_zone_format() {
         plan_and_collect(&ctx, "SELECT 
'2000-01-01T00:00:00'::TIMESTAMP::TIMESTAMPTZ")
             .await
             .unwrap();
-    let expected = vec![
-        "+-----------------------------------------------------+",
-        "| Utf8(\"2000-01-01T00:00:00\")                         |",
-        "+-----------------------------------------------------+",
-        "| 2000-01-01T00:00:00 (Unknown Time Zone '+08:00:00') |",
-        "+-----------------------------------------------------+",
-    ];
-    assert_batches_eq!(expected, &result);
+    let err = pretty_format_batches(&result).err().unwrap().to_string();
+    assert_eq!(err, "Parser error: Invalid timezone \"+08:00:00\": Expected 
format [+-]XX:XX, [+-]XX, or [+-]XXXX");
 
     plan_and_collect(&ctx, "SET TIME ZONE = '08:00'")
         .await
@@ -430,14 +425,9 @@ async fn set_time_zone_bad_time_zone_format() {
         plan_and_collect(&ctx, "SELECT 
'2000-01-01T00:00:00'::TIMESTAMP::TIMESTAMPTZ")
             .await
             .unwrap();
-    let expected = vec![
-        "+-------------------------------------------------+",
-        "| Utf8(\"2000-01-01T00:00:00\")                     |",
-        "+-------------------------------------------------+",
-        "| 2000-01-01T00:00:00 (Unknown Time Zone '08:00') |",
-        "+-------------------------------------------------+",
-    ];
-    assert_batches_eq!(expected, &result);
+
+    let err = pretty_format_batches(&result).err().unwrap().to_string();
+    assert_eq!(err, "Parser error: Invalid timezone \"08:00\": only offset 
based timezones supported without chrono-tz feature");
 
     plan_and_collect(&ctx, "SET TIME ZONE = '08'")
         .await
@@ -448,14 +438,9 @@ async fn set_time_zone_bad_time_zone_format() {
         plan_and_collect(&ctx, "SELECT 
'2000-01-01T00:00:00'::TIMESTAMP::TIMESTAMPTZ")
             .await
             .unwrap();
-    let expected = vec![
-        "+----------------------------------------------+",
-        "| Utf8(\"2000-01-01T00:00:00\")                  |",
-        "+----------------------------------------------+",
-        "| 2000-01-01T00:00:00 (Unknown Time Zone '08') |",
-        "+----------------------------------------------+",
-    ];
-    assert_batches_eq!(expected, &result);
+
+    let err = pretty_format_batches(&result).err().unwrap().to_string();
+    assert_eq!(err, "Parser error: Invalid timezone \"08\": only offset based 
timezones supported without chrono-tz feature");
 
     // we dont support named time zone yet
     plan_and_collect(&ctx, "SET TIME ZONE = 'Asia/Taipei'")
@@ -467,14 +452,9 @@ async fn set_time_zone_bad_time_zone_format() {
         plan_and_collect(&ctx, "SELECT 
'2000-01-01T00:00:00'::TIMESTAMP::TIMESTAMPTZ")
             .await
             .unwrap();
-    let expected = vec![
-        "+-------------------------------------------------------+",
-        "| Utf8(\"2000-01-01T00:00:00\")                           |",
-        "+-------------------------------------------------------+",
-        "| 2000-01-01T00:00:00 (Unknown Time Zone 'Asia/Taipei') |",
-        "+-------------------------------------------------------+",
-    ];
-    assert_batches_eq!(expected, &result);
+
+    let err = pretty_format_batches(&result).err().unwrap().to_string();
+    assert_eq!(err, "Parser error: Invalid timezone \"Asia/Taipei\": only 
offset based timezones supported without chrono-tz feature");
 
     // this is invalid even after we support named time zone
     plan_and_collect(&ctx, "SET TIME ZONE = 'Asia/Taipei2'")
@@ -486,12 +466,6 @@ async fn set_time_zone_bad_time_zone_format() {
         plan_and_collect(&ctx, "SELECT 
'2000-01-01T00:00:00'::TIMESTAMP::TIMESTAMPTZ")
             .await
             .unwrap();
-    let expected = vec![
-        "+--------------------------------------------------------+",
-        "| Utf8(\"2000-01-01T00:00:00\")                            |",
-        "+--------------------------------------------------------+",
-        "| 2000-01-01T00:00:00 (Unknown Time Zone 'Asia/Taipei2') |",
-        "+--------------------------------------------------------+",
-    ];
-    assert_batches_eq!(expected, &result);
+    let err = pretty_format_batches(&result).err().unwrap().to_string();
+    assert_eq!(err, "Parser error: Invalid timezone \"Asia/Taipei2\": only 
offset based timezones supported without chrono-tz feature");
 }
diff --git a/datafusion/core/tests/sql/udf.rs b/datafusion/core/tests/sql/udf.rs
index 0688aa319..4b68f30e9 100644
--- a/datafusion/core/tests/sql/udf.rs
+++ b/datafusion/core/tests/sql/udf.rs
@@ -158,7 +158,7 @@ async fn simple_udaf() -> Result<()> {
         "+-------------+",
         "| my_avg(t.a) |",
         "+-------------+",
-        "| 3           |",
+        "| 3.0         |",
         "+-------------+",
     ];
     assert_batches_eq!(expected, &result);
diff --git a/datafusion/core/tests/sql/wildcard.rs 
b/datafusion/core/tests/sql/wildcard.rs
index a55ccb80f..8cecfd829 100644
--- a/datafusion/core/tests/sql/wildcard.rs
+++ b/datafusion/core/tests/sql/wildcard.rs
@@ -26,25 +26,25 @@ async fn select_qualified_wildcard() -> Result<()> {
     let results = execute_to_batches(&ctx, sql).await;
 
     let expected = vec![
-        "+---------+----------------+-------+",
-        "| c1      | c2             | c3    |",
-        "+---------+----------------+-------+",
-        "| 0.00001 | 0.000000000001 | true  |",
-        "| 0.00002 | 0.000000000002 | false |",
-        "| 0.00002 | 0.000000000002 | false |",
-        "| 0.00003 | 0.000000000003 | true  |",
-        "| 0.00003 | 0.000000000003 | true  |",
-        "| 0.00003 | 0.000000000003 | true  |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "+---------+----------------+-------+",
+        "+---------+---------+-------+",
+        "| c1      | c2      | c3    |",
+        "+---------+---------+-------+",
+        "| 0.00001 | 1.0e-12 | true  |",
+        "| 0.00002 | 2.0e-12 | false |",
+        "| 0.00002 | 2.0e-12 | false |",
+        "| 0.00003 | 3.0e-12 | true  |",
+        "| 0.00003 | 3.0e-12 | true  |",
+        "| 0.00003 | 3.0e-12 | true  |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "+---------+---------+-------+",
     ];
 
     assert_batches_eq!(expected, &results);
@@ -61,25 +61,25 @@ async fn select_non_alias_qualified_wildcard() -> 
Result<()> {
     let results = execute_to_batches(&ctx, sql).await;
 
     let expected = vec![
-        "+---------+----------------+-------+",
-        "| c1      | c2             | c3    |",
-        "+---------+----------------+-------+",
-        "| 0.00001 | 0.000000000001 | true  |",
-        "| 0.00002 | 0.000000000002 | false |",
-        "| 0.00002 | 0.000000000002 | false |",
-        "| 0.00003 | 0.000000000003 | true  |",
-        "| 0.00003 | 0.000000000003 | true  |",
-        "| 0.00003 | 0.000000000003 | true  |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00004 | 0.000000000004 | false |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "| 0.00005 | 0.000000000005 | true  |",
-        "+---------+----------------+-------+",
+        "+---------+---------+-------+",
+        "| c1      | c2      | c3    |",
+        "+---------+---------+-------+",
+        "| 0.00001 | 1.0e-12 | true  |",
+        "| 0.00002 | 2.0e-12 | false |",
+        "| 0.00002 | 2.0e-12 | false |",
+        "| 0.00003 | 3.0e-12 | true  |",
+        "| 0.00003 | 3.0e-12 | true  |",
+        "| 0.00003 | 3.0e-12 | true  |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00004 | 4.0e-12 | false |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "| 0.00005 | 5.0e-12 | true  |",
+        "+---------+---------+-------+",
     ];
 
     assert_batches_eq!(expected, &results);
diff --git a/datafusion/core/tests/sql/window.rs 
b/datafusion/core/tests/sql/window.rs
index 7ef4af23a..9087fe989 100644
--- a/datafusion/core/tests/sql/window.rs
+++ b/datafusion/core/tests/sql/window.rs
@@ -103,7 +103,7 @@ async fn window_frame_rows_preceding() -> Result<()> {
         "| -48302                     | -16100.666666666666        | 3         
      |",
         "| 11243                      | 3747.6666666666665         | 3         
      |",
         "| -51311                     | -17103.666666666668        | 3         
      |",
-        "| -2391                      | -797                       | 3         
      |",
+        "| -2391                      | -797.0                     | 3         
      |",
         "| 46756                      | 15585.333333333334         | 3         
      |",
         
"+----------------------------+----------------------------+-----------------+",
     ];
@@ -158,7 +158,7 @@ async fn 
window_frame_rows_preceding_with_partition_unique_order_by() -> Result<
         "| -38611                     | -19305.5                   | 2         
      |",
         "| 17547                      | 8773.5                     | 2         
      |",
         "| -1301                      | -650.5                     | 2         
      |",
-        "| 26638                      | 13319                      | 3         
      |",
+        "| 26638                      | 13319.0                    | 3         
      |",
         "| 26861                      | 8953.666666666666          | 3         
      |",
         
"+----------------------------+----------------------------+-----------------+",
     ];
@@ -1223,9 +1223,9 @@ async fn test_window_rank() -> Result<()> {
         
"+-----------+-------+-------+-------------+-------------+---------------------+---------------------+",
         "| 28774375  | 80    | 80    | 5           | 5           | 
0.797979797979798   | 0.797979797979798   |",
         "| 63044568  | 62    | 62    | 4           | 4           | 
0.6161616161616161  | 0.6161616161616161  |",
-        "| 141047417 | 1     | 1     | 1           | 1           | 0           
        | 0                   |",
+        "| 141047417 | 1     | 1     | 1           | 1           | 0.0         
        | 0.0                 |",
         "| 141680161 | 41    | 41    | 3           | 3           | 
0.40404040404040403 | 0.40404040404040403 |",
-        "| 145294611 | 1     | 1     | 1           | 1           | 0           
        | 0                   |",
+        "| 145294611 | 1     | 1     | 1           | 1           | 0.0         
        | 0.0                 |",
         
"+-----------+-------+-------+-------------+-------------+---------------------+---------------------+",
     ];
     assert_batches_eq!(expected, &actual);
@@ -2521,11 +2521,11 @@ mod tests {
             
"+------+------+------+------+------+------+--------+--------+-------------------+-------------------+",
             "| sum1 | sum2 | min1 | min2 | max1 | max2 | count1 | count2 | 
avg1              | avg2              |",
             
"+------+------+------+------+------+------+--------+--------+-------------------+-------------------+",
-            "| 16   | 6    | 1    | 1    | 10   | 5    | 3      | 2      | 
5.333333333333333 | 3                 |",
-            "| 16   | 6    | 1    | 1    | 10   | 5    | 3      | 2      | 
5.333333333333333 | 3                 |",
+            "| 16   | 6    | 1    | 1    | 10   | 5    | 3      | 2      | 
5.333333333333333 | 3.0               |",
+            "| 16   | 6    | 1    | 1    | 10   | 5    | 3      | 2      | 
5.333333333333333 | 3.0               |",
             "| 51   | 16   | 1    | 1    | 20   | 10   | 5      | 3      | 
10.2              | 5.333333333333333 |",
-            "| 72   | 72   | 1    | 1    | 21   | 21   | 6      | 6      | 12  
              | 12                |",
-            "| 72   | 72   | 1    | 1    | 21   | 21   | 6      | 6      | 12  
              | 12                |",
+            "| 72   | 72   | 1    | 1    | 21   | 21   | 6      | 6      | 
12.0              | 12.0              |",
+            "| 72   | 72   | 1    | 1    | 21   | 21   | 6      | 6      | 
12.0              | 12.0              |",
             
"+------+------+------+------+------+------+--------+--------+-------------------+-------------------+",
         ];
         assert_batches_eq!(expected, &actual);
diff --git a/datafusion/core/tests/user_defined_aggregates.rs 
b/datafusion/core/tests/user_defined_aggregates.rs
index b00ad12a5..25183a1b2 100644
--- a/datafusion/core/tests/user_defined_aggregates.rs
+++ b/datafusion/core/tests/user_defined_aggregates.rs
@@ -44,11 +44,11 @@ async fn test_udf_returning_struct() {
     let ctx = udaf_struct_context();
     let sql = "SELECT first(value, time) from t";
     let expected = vec![
-        "+--------------------------------------------------+",
-        "| first(t.value,t.time)                            |",
-        "+--------------------------------------------------+",
-        "| {\"value\": 2, \"time\": 1970-01-01T00:00:00.000002} |",
-        "+--------------------------------------------------+",
+        "+------------------------------------------------+",
+        "| first(t.value,t.time)                          |",
+        "+------------------------------------------------+",
+        "| {value: 2.0, time: 1970-01-01T00:00:00.000002} |",
+        "+------------------------------------------------+",
     ];
     assert_batches_eq!(expected, &execute(&ctx, sql).await);
 }
@@ -62,7 +62,7 @@ async fn test_udf_returning_struct_sq() {
         "+-----------------+----------------------------+",
         "| sq.first[value] | sq.first[time]             |",
         "+-----------------+----------------------------+",
-        "| 2               | 1970-01-01T00:00:00.000002 |",
+        "| 2.0             | 1970-01-01T00:00:00.000002 |",
         "+-----------------+----------------------------+",
     ];
     assert_batches_eq!(expected, &execute(&ctx, sql).await);
diff --git a/datafusion/expr/Cargo.toml b/datafusion/expr/Cargo.toml
index dc7239146..8d017401c 100644
--- a/datafusion/expr/Cargo.toml
+++ b/datafusion/expr/Cargo.toml
@@ -36,7 +36,7 @@ path = "src/lib.rs"
 
 [dependencies]
 ahash = { version = "0.8", default-features = false, features = 
["runtime-rng"] }
-arrow = { version = "32.0.0", default-features = false }
+arrow = { version = "33.0.0", default-features = false }
 datafusion-common = { path = "../common", version = "18.0.0" }
 log = "^0.4"
 sqlparser = "0.30"
diff --git a/datafusion/jit/Cargo.toml b/datafusion/jit/Cargo.toml
index ae1e611aa..6402dbc84 100644
--- a/datafusion/jit/Cargo.toml
+++ b/datafusion/jit/Cargo.toml
@@ -36,7 +36,7 @@ path = "src/lib.rs"
 jit = []
 
 [dependencies]
-arrow = { version = "32.0.0", default-features = false }
+arrow = { version = "33.0.0", default-features = false }
 cranelift = "0.89.0"
 cranelift-jit = "0.89.0"
 cranelift-module = "0.89.0"
diff --git a/datafusion/optimizer/Cargo.toml b/datafusion/optimizer/Cargo.toml
index 97a93e192..74b34c2b1 100644
--- a/datafusion/optimizer/Cargo.toml
+++ b/datafusion/optimizer/Cargo.toml
@@ -37,7 +37,7 @@ default = ["unicode_expressions"]
 unicode_expressions = []
 
 [dependencies]
-arrow = { version = "32.0.0", features = ["prettyprint"] }
+arrow = { version = "33.0.0", features = ["prettyprint"] }
 async-trait = "0.1.41"
 chrono = { version = "0.4.23", default-features = false }
 datafusion-common = { path = "../common", version = "18.0.0" }
diff --git a/datafusion/physical-expr/Cargo.toml 
b/datafusion/physical-expr/Cargo.toml
index 8c0e1c623..44d9bf9e5 100644
--- a/datafusion/physical-expr/Cargo.toml
+++ b/datafusion/physical-expr/Cargo.toml
@@ -43,9 +43,9 @@ unicode_expressions = ["unicode-segmentation"]
 
 [dependencies]
 ahash = { version = "0.8", default-features = false, features = 
["runtime-rng"] }
-arrow = { version = "32.0.0", features = ["prettyprint"] }
-arrow-buffer = "32.0.0"
-arrow-schema = "32.0.0"
+arrow = { version = "33.0.0", features = ["prettyprint"] }
+arrow-buffer = "33.0.0"
+arrow-schema = "33.0.0"
 blake2 = { version = "^0.10.2", optional = true }
 blake3 = { version = "1.0", optional = true }
 chrono = { version = "0.4.23", default-features = false }
diff --git a/datafusion/proto/Cargo.toml b/datafusion/proto/Cargo.toml
index 7a6529c25..6746325ab 100644
--- a/datafusion/proto/Cargo.toml
+++ b/datafusion/proto/Cargo.toml
@@ -40,7 +40,7 @@ default = []
 json = ["pbjson", "serde", "serde_json"]
 
 [dependencies]
-arrow = "32.0.0"
+arrow = "33.0.0"
 chrono = { version = "0.4", default-features = false }
 datafusion = { path = "../core", version = "18.0.0" }
 datafusion-common = { path = "../common", version = "18.0.0" }
diff --git a/datafusion/row/Cargo.toml b/datafusion/row/Cargo.toml
index 62cdf8897..344d110d6 100644
--- a/datafusion/row/Cargo.toml
+++ b/datafusion/row/Cargo.toml
@@ -37,7 +37,7 @@ path = "src/lib.rs"
 jit = ["datafusion-jit"]
 
 [dependencies]
-arrow = "32.0.0"
+arrow = "33.0.0"
 datafusion-common = { path = "../common", version = "18.0.0" }
 datafusion-jit = { path = "../jit", version = "18.0.0", optional = true }
 paste = "^1.0"
diff --git a/datafusion/sql/Cargo.toml b/datafusion/sql/Cargo.toml
index 62c1def03..cc37b041d 100644
--- a/datafusion/sql/Cargo.toml
+++ b/datafusion/sql/Cargo.toml
@@ -37,7 +37,7 @@ default = ["unicode_expressions"]
 unicode_expressions = []
 
 [dependencies]
-arrow-schema = "32.0.0"
+arrow-schema = "33.0.0"
 datafusion-common = { path = "../common", version = "18.0.0" }
 datafusion-expr = { path = "../expr", version = "18.0.0" }
 log = "^0.4"
diff --git a/parquet-test-utils/Cargo.toml b/parquet-test-utils/Cargo.toml
index c63af8362..2083e527d 100644
--- a/parquet-test-utils/Cargo.toml
+++ b/parquet-test-utils/Cargo.toml
@@ -25,4 +25,4 @@ edition = "2021"
 [dependencies]
 datafusion = { path = "../datafusion/core" }
 object_store = "0.5.4"
-parquet = "32.0.0"
+parquet = "33.0.0"
diff --git a/test-utils/Cargo.toml b/test-utils/Cargo.toml
index 2d591926b..4aeaef068 100644
--- a/test-utils/Cargo.toml
+++ b/test-utils/Cargo.toml
@@ -23,7 +23,7 @@ edition = "2021"
 # See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-arrow = { version = "32.0.0", features = ["prettyprint"] }
+arrow = { version = "33.0.0", features = ["prettyprint"] }
 datafusion-common = { path = "../datafusion/common" }
 env_logger = "0.10.0"
 rand = "0.8"

Reply via email to