nealrichardson commented on code in PR #12817:
URL: https://github.com/apache/arrow/pull/12817#discussion_r853283433


##########
r/R/array.R:
##########
@@ -217,6 +217,125 @@ Array$create <- function(x, type = NULL) {
 Array$import_from_c <- ImportArray
 
 
+#' Convert an object to an Arrow Array
+#'
+#' Whereas `Array$create()` constructs an [Array] from the built-in data types
+#' for which the Arrow package implements fast converters, `as_arrow_array()`
+#' provides a means by which other packages can define conversions to Arrow
+#' objects.
+#'
+#' @param x An object to convert to an Arrow Array
+#' @param ... Passed to S3 methods
+#' @param type A [type][data-type] for the final Array. A value of `NULL`
+#'   will default to the type guessed by [type()].
+#'
+#' @return An [Array].
+#' @export
+#'
+#' @examplesIf arrow_available()
+#' as_arrow_array(1:5)
+#'
+as_arrow_array <- function(x, ..., type = NULL) {
+  UseMethod("as_arrow_array")
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.Array <- function(x, ..., type = NULL) {
+  if (is.null(type)) {
+    x
+  } else {
+    x$cast(type)
+  }
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.ChunkedArray <- function(x, ..., type = NULL) {
+  concat_arrays(!!! x$chunks, type = type)
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.vctrs_vctr <- function(x, ..., type = NULL) {
+  if (is.null(type)) {
+    vctrs_extension_array(x)
+  } else if (inherits(type, "VctrsExtensionType")) {
+    vctrs_extension_array(
+      x,
+      ptype = type$ptype(),
+      storage_type = type$storage_type()
+    )
+  } else {
+    stop_cant_convert_array(x, type)

Review Comment:
   Why not NextMethod anymore?
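
   For context, a rough sketch of what a NextMethod-based fallback here might look like (the previous body isn't shown in this diff, so this is an assumption, not the actual prior code):

   ```r
   as_arrow_array.vctrs_vctr <- function(x, ..., type = NULL) {
     if (is.null(type)) {
       vctrs_extension_array(x)
     } else if (inherits(type, "VctrsExtensionType")) {
       vctrs_extension_array(
         x,
         ptype = type$ptype(),
         storage_type = type$storage_type()
       )
     } else {
       # fall through to the next applicable method (ultimately the default
       # converter) instead of erroring immediately
       NextMethod()
     }
   }
   ```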



##########
r/R/array.R:
##########
@@ -217,6 +217,125 @@ Array$create <- function(x, type = NULL) {
 Array$import_from_c <- ImportArray
 
 
+#' Convert an object to an Arrow Array
+#'
+#' Whereas `Array$create()` constructs an [Array] from the built-in data types
+#' for which the Arrow package implements fast converters, `as_arrow_array()`
+#' provides a means by which other packages can define conversions to Arrow
+#' objects.
+#'
+#' @param x An object to convert to an Arrow Array
+#' @param ... Passed to S3 methods
+#' @param type A [type][data-type] for the final Array. A value of `NULL`
+#'   will default to the type guessed by [type()].
+#'
+#' @return An [Array].
+#' @export
+#'
+#' @examplesIf arrow_available()

Review Comment:
   arrow is always available now
   
   ```suggestion
   #' @examples
   ```



##########
r/R/array.R:
##########
@@ -217,6 +217,125 @@ Array$create <- function(x, type = NULL) {
 Array$import_from_c <- ImportArray
 
 
+#' Convert an object to an Arrow Array
+#'
+#' Whereas `Array$create()` constructs an [Array] from the built-in data types

Review Comment:
   This leaves me with some questions. Are `as_arrow_array()` conversions not optimized? If `as_arrow_array()` is going to call the regular Arrow converters for normal types, and it would work with extension types, why would I ever call `Array$create()`? When would `Array$create()` and `as_arrow_array()` do different things?
   
   Maybe you can illustrate through some `@examples`. 
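
   One possible shape for such examples, sketched against the methods added in this diff (the `my_vctr` class below is purely hypothetical, for illustration):

   ```r
   #' @examples
   #' # For built-in R types the two are equivalent:
   #' as_arrow_array(1:5)
   #' Array$create(1:5)
   #'
   #' # For a vctrs-based class, as_arrow_array() falls back to the
   #' # vctrs extension type:
   #' vctr <- vctrs::new_vctr(1:5, class = "my_vctr")
   #' as_arrow_array(vctr)
   ```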



##########
r/R/array.R:
##########
@@ -217,6 +217,125 @@ Array$create <- function(x, type = NULL) {
 Array$import_from_c <- ImportArray
 
 
+#' Convert an object to an Arrow Array
+#'
+#' Whereas `Array$create()` constructs an [Array] from the built-in data types
+#' for which the Arrow package implements fast converters, `as_arrow_array()`
+#' provides a means by which other packages can define conversions to Arrow
+#' objects.
+#'
+#' @param x An object to convert to an Arrow Array
+#' @param ... Passed to S3 methods
+#' @param type A [type][data-type] for the final Array. A value of `NULL`
+#'   will default to the type guessed by [type()].
+#'
+#' @return An [Array].
+#' @export
+#'
+#' @examplesIf arrow_available()
+#' as_arrow_array(1:5)
+#'
+as_arrow_array <- function(x, ..., type = NULL) {
+  UseMethod("as_arrow_array")
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.Array <- function(x, ..., type = NULL) {
+  if (is.null(type)) {
+    x
+  } else {
+    x$cast(type)
+  }
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.ChunkedArray <- function(x, ..., type = NULL) {
+  concat_arrays(!!! x$chunks, type = type)
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.vctrs_vctr <- function(x, ..., type = NULL) {
+  if (is.null(type)) {
+    vctrs_extension_array(x)
+  } else if (inherits(type, "VctrsExtensionType")) {
+    vctrs_extension_array(
+      x,
+      ptype = type$ptype(),
+      storage_type = type$storage_type()
+    )
+  } else {
+    stop_cant_convert_array(x, type)
+  }
+}
+
+#' @export
+as_arrow_array.POSIXlt <- function(x, ..., type = NULL) {
+  as_arrow_array.vctrs_vctr(x, ..., type = type)
+}
+
+#' @export
+as_arrow_array.data.frame <- function(x, ..., type = NULL) {
+  type <- type %||% infer_type(x)
+
+  if (inherits(type, "VctrsExtensionType")) {
+    storage <- as_arrow_array(x, type = type$storage_type())
+    new_extension_array(storage, type)
+  } else if (inherits(type, "StructType")) {
+    fields <- type$fields()
+    names <- map_chr(fields, "name")
+    types <- map(fields, "type")
+    arrays <- Map(as_arrow_array, x, types)
+    names(arrays) <- names
+
+    # ...because there isn't a StructArray$create() yet
+    batch <- record_batch(!!! arrays)
+    array_ptr <- allocate_arrow_array()
+    schema_ptr <- allocate_arrow_schema()
+    batch$export_to_c(array_ptr, schema_ptr)
+    Array$import_from_c(array_ptr, schema_ptr)
+  } else {
+    stop_cant_convert_array(x, type)
+  }
+}
+
+#' @export
+as_arrow_array.default <- function(x, ..., type = NULL, from_constructor = FALSE) {
+  # If from_constructor is TRUE, this is a call from C++ for which S3 dispatch
+  # failed to find a method for the object. If this is the case, we error.

Review Comment:
   Presumably C++ has also tried using the regular type inference as well?



##########
r/R/record-batch.R:
##########
@@ -189,3 +189,56 @@ record_batch <- RecordBatch$create
 
 #' @export
 names.RecordBatch <- function(x) x$names()
+
+#' Convert an object to an Arrow RecordBatch
+#'
+#' Whereas [record_batch()] constructs a [RecordBatch] from one or more columns,
+#' `as_record_batch()` converts a single object to an Arrow [RecordBatch].
+#'
+#' @param x An object to convert to an Arrow RecordBatch
+#' @param ... Passed to S3 methods
+#' @inheritParams record_batch
+#'
+#' @return A [RecordBatch]
+#' @export
+#'
+#' @examplesIf arrow_available()

Review Comment:
   ```suggestion
   #' @examples
   ```



##########
r/tests/testthat/test-Array.R:
##########
@@ -1010,6 +1010,110 @@ test_that("auto int64 conversion to int can be disabled (ARROW-10093)", {
   })
 })
 
+test_that("as_arrow_array() default method calls Array$create()", {
+  expect_equal(
+    as_arrow_array(1:10),
+    Array$create(1:10)
+  )
+
+  expect_equal(
+    as_arrow_array(1:10, type = float64()),
+    Array$create(1:10, type = float64())
+  )
+})
+
+test_that("as_arrow_array() works for Array", {
+  array <- Array$create(logical(), type = null())
+  expect_identical(as_arrow_array(array), array)
+  expect_equal(
+    as_arrow_array(array, type = int32()),
+    Array$create(integer())
+  )
+})
+
+test_that("as_arrow_array() works for ChunkedArray", {
+  expect_equal(
+    as_arrow_array(chunked_array(type = null())),
+    Array$create(logical(), type = null())
+  )
+
+  expect_equal(
+    as_arrow_array(chunked_array(1:3, 4:6)),
+    Array$create(1:6)
+  )
+
+  expect_equal(
+    as_arrow_array(chunked_array(1:3, 4:6), type = float64()),
+    Array$create(1:6, type = float64())
+  )
+})
+
+test_that("as_arrow_array() works for vctr_vctr types", {
+  vctr <- vctrs::new_vctr(1:5, class = "custom_vctr")
+  expect_equal(
+    as_arrow_array(vctr),
+    vctrs_extension_array(vctr)
+  )
+
+  # with explicit type
+  expect_equal(
+    as_arrow_array(
+      vctr,
+      type = vctrs_extension_type(
+        vctrs::vec_ptype(vctr),
+        storage_type = float64()
+      )
+    ),
+    vctrs_extension_array(
+      vctr,
+      storage_type = float64()
+    )
+  )
+
+  # with impossible type
+  expect_snapshot_error(as_arrow_array(vctr, type = float64()))
+})
+
+test_that("as_arrow_array() works for nested extension types", {
+  vctr <- vctrs::new_vctr(1:5, class = "custom_vctr")
+
+  nested <- tibble::tibble(x = vctr)
+  type <- infer_type(nested)
+
+  # with type = NULL
+  nested_array <- as_arrow_array(nested)
+  expect_identical(as.vector(nested_array), nested)
+
+  # with explicit type
+  expect_equal(as_arrow_array(nested, type = type), nested_array)
+
+  # with extension type
+  extension_array <- vctrs_extension_array(nested)
+  expect_equal(
+    as_arrow_array(nested, type = extension_array$type),
+    extension_array
+  )
+})
+
+test_that("as_arrow_array() default method errors for impossible cases", {
+  vec <- structure(list(), class = "class_not_supported")
+
+  # check errors simulating a call from C++
+  expect_snapshot_error(as_arrow_array(vec, from_constructor = TRUE))
+  expect_snapshot_error(
+    as_arrow_array(vec, type = float64(), from_constructor = TRUE)
+  )
+
+  # check that the errors propagate through Array$create()
+  type.class_not_supported <- function(x, ...) {

Review Comment:
   Didn't you rename this to `infer_type()`?



##########
r/tests/testthat/test-type.R:
##########
@@ -242,3 +268,19 @@ test_that("type() gets the right type for Expression", {
   expect_equal(add_xy$type(), type(add_xy))
   expect_equal(type(add_xy), float64())
 })
+
+test_that("type() infers type for POSIXlt", {

Review Comment:
   This is now infer_type()?



##########
r/tests/testthat/test-ipc_stream.R:
##########
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one

Review Comment:
   Can you please rename this file to `test-ipc-stream.R` for consistency?



##########
r/tests/testthat/test-backwards-compatibility.R:
##########
@@ -112,10 +112,36 @@ for (comp in c("lz4", "uncompressed", "zstd")) {
     # not maintained and the embedded tibble's attributes are read in a wrong
     # order. Since this is prior to 1.0.0 punting on checking the attributes
     # though classes are always checked, so that must be removed before checking.
-    example_with_metadata_sans_special_class <- example_with_metadata
+    example_with_metadata_sans_special_class <- old_example_with_metadata
     example_with_metadata_sans_special_class$a <- unclass(example_with_metadata_sans_special_class$a)
     expect_equal(df, example_with_metadata_sans_special_class, ignore_attr = TRUE)
   })
 }
 
+test_that("sfc columns written by arrow <= 7.0.0 can be re-read", {
+  # nolint start
+  # df <- data.frame(x = I(list(structure(1, foo = "bar"), structure(2, baz = "qux"))))
+  # class(df$x) <- c("sfc_MULTIPOLYGON", "sfc", "list")
+  # withr::with_options(
+  #   list("arrow.preserve_row_level_metadata" = TRUE), {

Review Comment:
   But this isn't the default in 7.0.0 is it?



##########
r/R/record-batch.R:
##########
@@ -189,3 +189,56 @@ record_batch <- RecordBatch$create
 
 #' @export
 names.RecordBatch <- function(x) x$names()
+
+#' Convert an object to an Arrow RecordBatch
+#'
+#' Whereas [record_batch()] constructs a [RecordBatch] from one or more columns,
+#' `as_record_batch()` converts a single object to an Arrow [RecordBatch].
+#'
+#' @param x An object to convert to an Arrow RecordBatch
+#' @param ... Passed to S3 methods
+#' @inheritParams record_batch
+#'
+#' @return A [RecordBatch]
+#' @export
+#'
+#' @examplesIf arrow_available()
+#' as_record_batch(data.frame(col1 = 1, col2 = "two"))
+#'
+as_record_batch <- function(x, ..., schema = NULL) {
+  UseMethod("as_record_batch")
+}
+
+#' @rdname as_record_batch
+#' @export
+as_record_batch.RecordBatch <- function(x, ..., schema = NULL) {
+  if (is.null(schema)) {
+    x
+  } else {
+    x$cast(schema)
+  }
+}
+
+#' @rdname as_record_batch
+#' @export
+as_record_batch.Table <- function(x, ..., schema = NULL) {
+  if (x$num_columns == 0) {
+    batch <- record_batch(data.frame())
+    return(batch$Take(rep_len(0, x$num_rows)))
+  }
+
+  arrays_out <- lapply(x$columns, as_arrow_array)

Review Comment:
   Same comment as on @wjones127's PR: seems like `Table$columns` should return a named list, right? (Or what would the harm be in applying the names?)
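
   For illustration, a hypothetical named-columns accessor (not part of this PR) that would make the extra `names<-` step unnecessary for callers:

   ```r
   # hypothetical helper: return a Table's columns as a named list
   table_columns_named <- function(x) {
     cols <- x$columns
     names(cols) <- names(x)  # names.Table() returns x$ColumnNames()
     cols
   }
   ```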



##########
r/R/array.R:
##########
@@ -217,6 +217,125 @@ Array$create <- function(x, type = NULL) {
 Array$import_from_c <- ImportArray
 
 
+#' Convert an object to an Arrow Array
+#'
+#' Whereas `Array$create()` constructs an [Array] from the built-in data types
+#' for which the Arrow package implements fast converters, `as_arrow_array()`
+#' provides a means by which other packages can define conversions to Arrow
+#' objects.
+#'
+#' @param x An object to convert to an Arrow Array
+#' @param ... Passed to S3 methods
+#' @param type A [type][data-type] for the final Array. A value of `NULL`
+#'   will default to the type guessed by [type()].
+#'
+#' @return An [Array].
+#' @export
+#'
+#' @examplesIf arrow_available()
+#' as_arrow_array(1:5)
+#'
+as_arrow_array <- function(x, ..., type = NULL) {
+  UseMethod("as_arrow_array")
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.Array <- function(x, ..., type = NULL) {
+  if (is.null(type)) {
+    x
+  } else {
+    x$cast(type)
+  }
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.ChunkedArray <- function(x, ..., type = NULL) {
+  concat_arrays(!!! x$chunks, type = type)
+}
+
+#' @rdname as_arrow_array
+#' @export
+as_arrow_array.vctrs_vctr <- function(x, ..., type = NULL) {
+  if (is.null(type)) {
+    vctrs_extension_array(x)
+  } else if (inherits(type, "VctrsExtensionType")) {
+    vctrs_extension_array(
+      x,
+      ptype = type$ptype(),
+      storage_type = type$storage_type()
+    )
+  } else {
+    stop_cant_convert_array(x, type)
+  }
+}
+
+#' @export
+as_arrow_array.POSIXlt <- function(x, ..., type = NULL) {
+  as_arrow_array.vctrs_vctr(x, ..., type = type)
+}
+
+#' @export
+as_arrow_array.data.frame <- function(x, ..., type = NULL) {
+  type <- type %||% infer_type(x)
+
+  if (inherits(type, "VctrsExtensionType")) {
+    storage <- as_arrow_array(x, type = type$storage_type())
+    new_extension_array(storage, type)
+  } else if (inherits(type, "StructType")) {
+    fields <- type$fields()
+    names <- map_chr(fields, "name")
+    types <- map(fields, "type")
+    arrays <- Map(as_arrow_array, x, types)
+    names(arrays) <- names
+
+    # ...because there isn't a StructArray$create() yet

Review Comment:
   There isn't (but there is in C++), but I think the reason we didn't add bindings to create from a vector of Arrays is because `Array$create(data.frame)` produces a StructArray. Why can't we use that here?
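
   A quick sketch of the behavior being referred to (the output shown in comments is approximate):

   ```r
   library(arrow)

   # Array$create() on a data.frame already produces a StructArray
   struct_arr <- Array$create(data.frame(a = 1:3, b = c("x", "y", "z")))
   struct_arr$type
   #> StructType
   #> struct<a: int32, b: string>
   ```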



##########
r/R/record-batch-reader.R:
##########
@@ -176,3 +176,61 @@ RecordBatchFileReader$create <- function(file) {
   assert_is(file, "InputStream")
   ipc___RecordBatchFileReader__Open(file)
 }
+
+#' Convert an object to an Arrow RecordBatchReader
+#'
+#' @param x An object to convert to a [RecordBatchReader]
+#' @param ... Passed to S3 methods
+#'
+#' @return A [RecordBatchReader]
+#' @export
+#'
+#' @examplesIf arrow_available() && arrow_with_dataset()
+#' reader <- as_record_batch_reader(data.frame(col1 = 1, col2 = "two"))
+#' reader$read_next_batch()
+#'
+as_record_batch_reader <- function(x, ...) {
+  UseMethod("as_record_batch_reader")
+}
+
+#' @rdname as_record_batch_reader
+#' @export
+as_record_batch_reader.RecordBatchReader <- function(x, ...) {
+  x
+}
+
+#' @rdname as_record_batch_reader
+#' @export
+as_record_batch_reader.Table <- function(x, ...) {
+  RecordBatchReader__from_Table(x)
+}
+
+#' @rdname as_record_batch_reader
+#' @export
+as_record_batch_reader.RecordBatch <- function(x, ...) {
+  as_record_batch_reader(as_arrow_table(x))
+}
+
+#' @rdname as_record_batch_reader
+#' @export
+as_record_batch_reader.data.frame <- function(x, ...) {
+  as_record_batch_reader(as_arrow_table(x))
+}
+
+#' @rdname as_record_batch_reader
+#' @export
+as_record_batch_reader.Dataset <- function(x, ...) {
+  Scanner$create(x)$ToRecordBatchReader()
+}
+
+#' @rdname as_record_batch_reader
+#' @export
+as_record_batch_reader.arrow_dplyr_query <- function(x, ...) {
+  as_record_batch_reader(collect.arrow_dplyr_query(x, as_data_frame = FALSE))

Review Comment:
   ```suggestion
     # TODO(ARROW-15271): make ExecPlan return RBR
     as_record_batch_reader(collect.arrow_dplyr_query(x, as_data_frame = FALSE))
   ```



##########
r/R/table.R:
##########
@@ -166,3 +166,63 @@ names.Table <- function(x) x$ColumnNames()
 #' as.data.frame(tbl[4:8, c("gear", "hp", "wt")])
 #' @export
 arrow_table <- Table$create
+
+
+#' Convert an object to an Arrow Table
+#'
+#' Whereas [arrow_table()] constructs a table from one or more columns,
+#' `as_arrow_table()` converts a single object to an Arrow [Table].
+#'
+#' @param x An object to convert to an Arrow Table
+#' @param ... Passed to S3 methods
+#' @inheritParams arrow_table
+#'
+#' @return A [Table]
+#' @export
+#'
+#' @examplesIf arrow_available()

Review Comment:
   ```suggestion
   #' @examples
   ```



##########
r/R/record-batch.R:
##########
@@ -189,3 +189,56 @@ record_batch <- RecordBatch$create
 
 #' @export
 names.RecordBatch <- function(x) x$names()
+
+#' Convert an object to an Arrow RecordBatch
+#'
+#' Whereas [record_batch()] constructs a [RecordBatch] from one or more columns,
+#' `as_record_batch()` converts a single object to an Arrow [RecordBatch].
+#'
+#' @param x An object to convert to an Arrow RecordBatch
+#' @param ... Passed to S3 methods
+#' @inheritParams record_batch
+#'
+#' @return A [RecordBatch]
+#' @export
+#'
+#' @examplesIf arrow_available()
+#' as_record_batch(data.frame(col1 = 1, col2 = "two"))
+#'
+as_record_batch <- function(x, ..., schema = NULL) {
+  UseMethod("as_record_batch")
+}
+
+#' @rdname as_record_batch
+#' @export
+as_record_batch.RecordBatch <- function(x, ..., schema = NULL) {
+  if (is.null(schema)) {
+    x
+  } else {
+    x$cast(schema)
+  }
+}
+
+#' @rdname as_record_batch
+#' @export
+as_record_batch.Table <- function(x, ..., schema = NULL) {
+  if (x$num_columns == 0) {
+    batch <- record_batch(data.frame())
+    return(batch$Take(rep_len(0, x$num_rows)))
+  }
+
+  arrays_out <- lapply(x$columns, as_arrow_array)
+  names(arrays_out) <- names(x)
+  out <- RecordBatch__from_arrays(NULL, arrays_out)

Review Comment:
   Or if you wanted to avoid calling the C++ wrapper directly (I personally try to limit the places that happens just so we make sure to check inputs to protect against segfault):
   
   ```suggestion
     out <- RecordBatch$create(!!!arrays_out)
   ```



##########
r/tests/testthat/helper-data.R:
##########
@@ -25,7 +25,7 @@ example_data <- tibble::tibble(
   fct = factor(letters[c(1:4, NA, NA, 7:10)])
 )
 
-example_with_metadata <- tibble::tibble(
+old_example_with_metadata <- tibble::tibble(

Review Comment:
   Why rename?



##########
r/R/record-batch.R:
##########
@@ -189,3 +189,56 @@ record_batch <- RecordBatch$create
 
 #' @export
 names.RecordBatch <- function(x) x$names()
+
+#' Convert an object to an Arrow RecordBatch
+#'
+#' Whereas [record_batch()] constructs a [RecordBatch] from one or more columns,
+#' `as_record_batch()` converts a single object to an Arrow [RecordBatch].
+#'
+#' @param x An object to convert to an Arrow RecordBatch
+#' @param ... Passed to S3 methods
+#' @inheritParams record_batch
+#'
+#' @return A [RecordBatch]
+#' @export
+#'
+#' @examplesIf arrow_available()
+#' as_record_batch(data.frame(col1 = 1, col2 = "two"))

Review Comment:
   This is another case where examples would be good to show the difference between record_batch() and as_record_batch().
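
   For example, something along these lines (a sketch, using the constructors shown elsewhere in this PR):

   ```r
   #' @examples
   #' # record_batch() builds a RecordBatch from one or more columns:
   #' record_batch(col1 = 1, col2 = "two")
   #'
   #' # as_record_batch() converts a single object, e.g. a data.frame or Table:
   #' as_record_batch(data.frame(col1 = 1, col2 = "two"))
   #' as_record_batch(arrow_table(col1 = 1, col2 = "two"))
   ```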



##########
r/tests/testthat/test-RecordBatch.R:
##########
@@ -688,3 +688,53 @@ test_that("RecordBatchReader to C-interface to arrow_dplyr_query", {
   # must clean up the pointer or we leak
   delete_arrow_array_stream(stream_ptr)
 })
+
+
+test_that("as_record_batch() works for RecordBatch", {
+  batch <- record_batch(col1 = 1L, col2 = "two")
+  expect_identical(as_record_batch(batch), batch)
+  expect_equal(
+    as_record_batch(batch, schema = schema(col1 = float64(), col2 = string())),
+    record_batch(col1 = Array$create(1, type = float64()), col2 = "two")
+  )
+})
+
+test_that("as_record_batch() works for Table", {
+  batch <- record_batch(col1 = 1L, col2 = "two")
+  table <- arrow_table(col1 = 1L, col2 = "two")
+
+  expect_equal(as_record_batch(table), batch)
+  expect_equal(
+    as_record_batch(table, schema = schema(col1 = float64(), col2 = string())),
+    record_batch(col1 = Array$create(1, type = float64()), col2 = "two")
+  )
+
+  # also check zero column table and make sure row count is preserved
+  table0 <- table[integer()]
+  expect_identical(table0$num_columns, 0L)
+  expect_identical(table0$num_rows, 1L)
+
+  batch0 <- as_record_batch(table0)
+  expect_identical(batch0$num_columns, 0L)
+  expect_identical(batch0$num_rows, 1L)
+})
+
+test_that("as_record_batch() works for data.frame()", {
+  batch <- record_batch(col1 = 1L, col2 = "two")
+  tbl <- tibble::tibble(col1 = 1L, col2 = "two")
+
+  # metadata prevents these from being equal

Review Comment:
   Why? Should they have different metadata?
   
   If so, you don't have to delete the metadata; you can do `expect_equal(batch2, batch, ignore_attr = TRUE)`.



##########
r/tests/testthat/test-Array.R:
##########
@@ -1010,6 +1010,110 @@ test_that("auto int64 conversion to int can be disabled (ARROW-10093)", {
   })
 })
 
+test_that("as_arrow_array() default method calls Array$create()", {
+  expect_equal(
+    as_arrow_array(1:10),
+    Array$create(1:10)
+  )
+
+  expect_equal(
+    as_arrow_array(1:10, type = float64()),
+    Array$create(1:10, type = float64())
+  )
+})
+
+test_that("as_arrow_array() works for Array", {
+  array <- Array$create(logical(), type = null())
+  expect_identical(as_arrow_array(array), array)
+  expect_equal(
+    as_arrow_array(array, type = int32()),
+    Array$create(integer())
+  )
+})
+
+test_that("as_arrow_array() works for ChunkedArray", {
+  expect_equal(
+    as_arrow_array(chunked_array(type = null())),
+    Array$create(logical(), type = null())
+  )
+
+  expect_equal(
+    as_arrow_array(chunked_array(1:3, 4:6)),
+    Array$create(1:6)
+  )
+
+  expect_equal(
+    as_arrow_array(chunked_array(1:3, 4:6), type = float64()),
+    Array$create(1:6, type = float64())
+  )
+})
+
+test_that("as_arrow_array() works for vctr_vctr types", {
+  vctr <- vctrs::new_vctr(1:5, class = "custom_vctr")
+  expect_equal(
+    as_arrow_array(vctr),
+    vctrs_extension_array(vctr)
+  )
+
+  # with explicit type
+  expect_equal(
+    as_arrow_array(
+      vctr,
+      type = vctrs_extension_type(
+        vctrs::vec_ptype(vctr),
+        storage_type = float64()
+      )
+    ),
+    vctrs_extension_array(
+      vctr,
+      storage_type = float64()
+    )
+  )
+
+  # with impossible type
+  expect_snapshot_error(as_arrow_array(vctr, type = float64()))
+})
+
+test_that("as_arrow_array() works for nested extension types", {
+  vctr <- vctrs::new_vctr(1:5, class = "custom_vctr")
+
+  nested <- tibble::tibble(x = vctr)
+  type <- infer_type(nested)
+
+  # with type = NULL
+  nested_array <- as_arrow_array(nested)
+  expect_identical(as.vector(nested_array), nested)
+
+  # with explicit type
+  expect_equal(as_arrow_array(nested, type = type), nested_array)
+
+  # with extension type
+  extension_array <- vctrs_extension_array(nested)
+  expect_equal(
+    as_arrow_array(nested, type = extension_array$type),
+    extension_array
+  )
+})
+
+test_that("as_arrow_array() default method errors for impossible cases", {
+  vec <- structure(list(), class = "class_not_supported")
+
+  # check errors simulating a call from C++
+  expect_snapshot_error(as_arrow_array(vec, from_constructor = TRUE))
+  expect_snapshot_error(
+    as_arrow_array(vec, type = float64(), from_constructor = TRUE)
+  )
+
+  # check that the errors propagate through Array$create()
+  type.class_not_supported <- function(x, ...) {
+    float64()
+  }
+
+  # slightly different error if type was specified

Review Comment:
   Forgive the dead-horse beating, but it's hard for me to tell that this is the case by looking at the test file.



##########
r/tests/testthat/test-Array.R:
##########
@@ -1010,6 +1010,110 @@ test_that("auto int64 conversion to int can be disabled (ARROW-10093)", {
   })
 })
 
+test_that("as_arrow_array() default method calls Array$create()", {
+  expect_equal(
+    as_arrow_array(1:10),
+    Array$create(1:10)
+  )
+
+  expect_equal(
+    as_arrow_array(1:10, type = float64()),
+    Array$create(1:10, type = float64())
+  )
+})
+
+test_that("as_arrow_array() works for Array", {
+  array <- Array$create(logical(), type = null())
+  expect_identical(as_arrow_array(array), array)
+  expect_equal(
+    as_arrow_array(array, type = int32()),
+    Array$create(integer())
+  )
+})
+
+test_that("as_arrow_array() works for ChunkedArray", {
+  expect_equal(
+    as_arrow_array(chunked_array(type = null())),
+    Array$create(logical(), type = null())
+  )
+
+  expect_equal(
+    as_arrow_array(chunked_array(1:3, 4:6)),
+    Array$create(1:6)
+  )
+
+  expect_equal(
+    as_arrow_array(chunked_array(1:3, 4:6), type = float64()),
+    Array$create(1:6, type = float64())
+  )
+})
+
+test_that("as_arrow_array() works for vctr_vctr types", {

Review Comment:
   ```suggestion
   test_that("as_arrow_array() works for vctrs_vctr types", {
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
