This is an automated email from the ASF dual-hosted git repository.

wesm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/master by this push:
     new ad7e6c3  ARROW-2989: [C++/Python] Remove API deprecations in 0.10
ad7e6c3 is described below

commit ad7e6c3d5422eb8742fc198905ca2460dce685c6
Author: Wes McKinney <[email protected]>
AuthorDate: Tue Sep 4 17:24:23 2018 -0400

    ARROW-2989: [C++/Python] Remove API deprecations in 0.10
    
    I didn't remove the `nthreads` deprecations in Python yet. @pitrou do you 
support removing them in 0.11?
    
    Author: Wes McKinney <[email protected]>
    
    Closes #2478 from wesm/ARROW-2989 and squashes the following commits:
    
    8de36e6a <Wes McKinney> Test non-threaded conversion
    bff138a0 <Wes McKinney> Make use_threads=True the default in pandas 
conversion tests
    5ae97a96 <Wes McKinney> Remove nthreads argument where thread pool is being 
used. Set default for use_threads to True
    279453c7 <Wes McKinney> Remove C++/Python API deprecations in 0.10, except 
for nthreads in Python
---
 cpp/src/arrow/buffer.cc                     |  4 --
 cpp/src/arrow/buffer.h                      |  5 --
 cpp/src/arrow/builder.cc                    | 81 -----------------------------
 cpp/src/arrow/builder.h                     | 76 ---------------------------
 cpp/src/plasma/client.h                     |  4 --
 cpp/src/plasma/test/client_tests.cc         |  7 ---
 python/pyarrow/__init__.py                  |  2 -
 python/pyarrow/feather.py                   | 16 +++---
 python/pyarrow/io.pxi                       |  9 ----
 python/pyarrow/ipc.pxi                      |  2 +-
 python/pyarrow/ipc.py                       |  9 ++--
 python/pyarrow/table.pxi                    | 18 +++----
 python/pyarrow/tests/test_convert_pandas.py | 12 +++--
 python/pyarrow/util.py                      | 10 ----
 14 files changed, 28 insertions(+), 227 deletions(-)

diff --git a/cpp/src/arrow/buffer.cc b/cpp/src/arrow/buffer.cc
index 388e98f..2c01041 100644
--- a/cpp/src/arrow/buffer.cc
+++ b/cpp/src/arrow/buffer.cc
@@ -210,8 +210,4 @@ Status AllocateEmptyBitmap(int64_t length, 
std::shared_ptr<Buffer>* out) {
   return AllocateEmptyBitmap(default_memory_pool(), length, out);
 }
 
-Status GetEmptyBitmap(MemoryPool* pool, int64_t length, 
std::shared_ptr<Buffer>* out) {
-  return AllocateEmptyBitmap(pool, length, out);
-}
-
 }  // namespace arrow
diff --git a/cpp/src/arrow/buffer.h b/cpp/src/arrow/buffer.h
index 442c451..99beb23 100644
--- a/cpp/src/arrow/buffer.h
+++ b/cpp/src/arrow/buffer.h
@@ -319,11 +319,6 @@ Status AllocateEmptyBitmap(MemoryPool* pool, int64_t 
length,
 ARROW_EXPORT
 Status AllocateEmptyBitmap(int64_t length, std::shared_ptr<Buffer>* out);
 
-/// \deprecated Use AllocateEmptyBitmap instead.
-ARROW_DEPRECATED("Use AllocateEmptyBitmap instead")
-ARROW_EXPORT
-Status GetEmptyBitmap(MemoryPool* pool, int64_t length, 
std::shared_ptr<Buffer>* out);
-
 // ----------------------------------------------------------------------
 // Buffer builder classes
 
diff --git a/cpp/src/arrow/builder.cc b/cpp/src/arrow/builder.cc
index 8bdcb60..65d1ea7 100644
--- a/cpp/src/arrow/builder.cc
+++ b/cpp/src/arrow/builder.cc
@@ -234,12 +234,6 @@ Status PrimitiveBuilder<T>::AppendValues(const value_type* 
values, int64_t lengt
 }
 
 template <typename T>
-Status PrimitiveBuilder<T>::Append(const value_type* values, int64_t length,
-                                   const uint8_t* valid_bytes) {
-  return AppendValues(values, length, valid_bytes);
-}
-
-template <typename T>
 Status PrimitiveBuilder<T>::AppendValues(const value_type* values, int64_t 
length,
                                          const std::vector<bool>& is_valid) {
   RETURN_NOT_OK(Reserve(length));
@@ -256,34 +250,17 @@ Status PrimitiveBuilder<T>::AppendValues(const 
value_type* values, int64_t lengt
 }
 
 template <typename T>
-Status PrimitiveBuilder<T>::Append(const value_type* values, int64_t length,
-                                   const std::vector<bool>& is_valid) {
-  return AppendValues(values, length, is_valid);
-}
-
-template <typename T>
 Status PrimitiveBuilder<T>::AppendValues(const std::vector<value_type>& values,
                                          const std::vector<bool>& is_valid) {
   return AppendValues(values.data(), static_cast<int64_t>(values.size()), 
is_valid);
 }
 
 template <typename T>
-Status PrimitiveBuilder<T>::Append(const std::vector<value_type>& values,
-                                   const std::vector<bool>& is_valid) {
-  return AppendValues(values, is_valid);
-}
-
-template <typename T>
 Status PrimitiveBuilder<T>::AppendValues(const std::vector<value_type>& 
values) {
   return AppendValues(values.data(), static_cast<int64_t>(values.size()));
 }
 
 template <typename T>
-Status PrimitiveBuilder<T>::Append(const std::vector<value_type>& values) {
-  return AppendValues(values);
-}
-
-template <typename T>
 Status PrimitiveBuilder<T>::FinishInternal(std::shared_ptr<ArrayData>* out) {
   RETURN_NOT_OK(TrimBuffer(BitUtil::BytesForBits(length_), 
null_bitmap_.get()));
   RETURN_NOT_OK(TrimBuffer(TypeTraits<T>::bytes_required(length_), 
data_.get()));
@@ -427,11 +404,6 @@ Status AdaptiveIntBuilder::AppendValues(const int64_t* 
values, int64_t length,
   return Status::OK();
 }
 
-Status AdaptiveIntBuilder::Append(const int64_t* values, int64_t length,
-                                  const uint8_t* valid_bytes) {
-  return AppendValues(values, length, valid_bytes);
-}
-
 template <typename new_type, typename old_type>
 typename std::enable_if<sizeof(old_type) >= sizeof(new_type), Status>::type
 AdaptiveIntBuilder::ExpandIntSizeInternal() {
@@ -585,11 +557,6 @@ Status AdaptiveUIntBuilder::AppendValues(const uint64_t* 
values, int64_t length,
   return Status::OK();
 }
 
-Status AdaptiveUIntBuilder::Append(const uint64_t* values, int64_t length,
-                                   const uint8_t* valid_bytes) {
-  return AppendValues(values, length, valid_bytes);
-}
-
 template <typename new_type, typename old_type>
 typename std::enable_if<sizeof(old_type) >= sizeof(new_type), Status>::type
 AdaptiveUIntBuilder::ExpandIntSizeInternal() {
@@ -733,11 +700,6 @@ Status BooleanBuilder::AppendValues(const uint8_t* values, 
int64_t length,
   return Status::OK();
 }
 
-Status BooleanBuilder::Append(const uint8_t* values, int64_t length,
-                              const uint8_t* valid_bytes) {
-  return AppendValues(values, length, valid_bytes);
-}
-
 Status BooleanBuilder::AppendValues(const uint8_t* values, int64_t length,
                                     const std::vector<bool>& is_valid) {
   RETURN_NOT_OK(Reserve(length));
@@ -752,29 +714,15 @@ Status BooleanBuilder::AppendValues(const uint8_t* 
values, int64_t length,
   return Status::OK();
 }
 
-Status BooleanBuilder::Append(const uint8_t* values, int64_t length,
-                              const std::vector<bool>& is_valid) {
-  return AppendValues(values, length, is_valid);
-}
-
 Status BooleanBuilder::AppendValues(const std::vector<uint8_t>& values,
                                     const std::vector<bool>& is_valid) {
   return AppendValues(values.data(), static_cast<int64_t>(values.size()), 
is_valid);
 }
 
-Status BooleanBuilder::Append(const std::vector<uint8_t>& values,
-                              const std::vector<bool>& is_valid) {
-  return AppendValues(values, is_valid);
-}
-
 Status BooleanBuilder::AppendValues(const std::vector<uint8_t>& values) {
   return AppendValues(values.data(), static_cast<int64_t>(values.size()));
 }
 
-Status BooleanBuilder::Append(const std::vector<uint8_t>& values) {
-  return AppendValues(values);
-}
-
 Status BooleanBuilder::AppendValues(const std::vector<bool>& values,
                                     const std::vector<bool>& is_valid) {
   const int64_t length = static_cast<int64_t>(values.size());
@@ -790,11 +738,6 @@ Status BooleanBuilder::AppendValues(const 
std::vector<bool>& values,
   return Status::OK();
 }
 
-Status BooleanBuilder::Append(const std::vector<bool>& values,
-                              const std::vector<bool>& is_valid) {
-  return AppendValues(values, is_valid);
-}
-
 Status BooleanBuilder::AppendValues(const std::vector<bool>& values) {
   const int64_t length = static_cast<int64_t>(values.size());
   RETURN_NOT_OK(Reserve(length));
@@ -808,10 +751,6 @@ Status BooleanBuilder::AppendValues(const 
std::vector<bool>& values) {
   return Status::OK();
 }
 
-Status BooleanBuilder::Append(const std::vector<bool>& values) {
-  return AppendValues(values);
-}
-
 // ----------------------------------------------------------------------
 // DictionaryBuilder
 
@@ -1236,11 +1175,6 @@ Status ListBuilder::AppendValues(const int32_t* offsets, 
int64_t length,
   return Status::OK();
 }
 
-Status ListBuilder::Append(const int32_t* offsets, int64_t length,
-                           const uint8_t* valid_bytes) {
-  return AppendValues(offsets, length, valid_bytes);
-}
-
 Status ListBuilder::AppendNextOffset() {
   int64_t num_values = value_builder_->length();
   if (ARROW_PREDICT_FALSE(num_values > kListMaximumElements)) {
@@ -1418,11 +1352,6 @@ Status StringBuilder::AppendValues(const 
std::vector<std::string>& values,
   return Status::OK();
 }
 
-Status StringBuilder::Append(const std::vector<std::string>& values,
-                             const uint8_t* valid_bytes) {
-  return AppendValues(values, valid_bytes);
-}
-
 Status StringBuilder::AppendValues(const char** values, int64_t length,
                                    const uint8_t* valid_bytes) {
   std::size_t total_length = 0;
@@ -1481,11 +1410,6 @@ Status StringBuilder::AppendValues(const char** values, 
int64_t length,
   return Status::OK();
 }
 
-Status StringBuilder::Append(const char** values, int64_t length,
-                             const uint8_t* valid_bytes) {
-  return AppendValues(values, length, valid_bytes);
-}
-
 // ----------------------------------------------------------------------
 // Fixed width binary
 
@@ -1502,11 +1426,6 @@ Status FixedSizeBinaryBuilder::AppendValues(const 
uint8_t* data, int64_t length,
   return byte_builder_.Append(data, length * byte_width_);
 }
 
-Status FixedSizeBinaryBuilder::Append(const uint8_t* data, int64_t length,
-                                      const uint8_t* valid_bytes) {
-  return AppendValues(data, length, valid_bytes);
-}
-
 Status FixedSizeBinaryBuilder::Append(const std::string& value) {
   return Append(reinterpret_cast<const uint8_t*>(value.c_str()));
 }
diff --git a/cpp/src/arrow/builder.h b/cpp/src/arrow/builder.h
index 69f2bde..432ea9f 100644
--- a/cpp/src/arrow/builder.h
+++ b/cpp/src/arrow/builder.h
@@ -116,9 +116,6 @@ class ARROW_EXPORT ArrayBuilder {
   /// this function responsibly.
   Status Advance(int64_t elements);
 
-  ARROW_DEPRECATED("Use Finish instead")
-  std::shared_ptr<ResizableBuffer> null_bitmap() const { return null_bitmap_; }
-
   /// \brief Return result of builder as an internal generic ArrayData
   /// object. Resets builder except for dictionary builder
   ///
@@ -252,9 +249,6 @@ class ARROW_EXPORT PrimitiveBuilder : public ArrayBuilder {
     return Status::OK();
   }
 
-  ARROW_DEPRECATED("Use Finish instead")
-  std::shared_ptr<Buffer> data() const { return data_; }
-
   value_type GetValue(int64_t index) const {
     return reinterpret_cast<const value_type*>(data_->data())[index];
   }
@@ -267,10 +261,6 @@ class ARROW_EXPORT PrimitiveBuilder : public ArrayBuilder {
   /// \return Status
   Status AppendValues(const value_type* values, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const value_type* values, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values a contiguous C array of values
@@ -280,10 +270,6 @@ class ARROW_EXPORT PrimitiveBuilder : public ArrayBuilder {
   /// \return Status
   Status AppendValues(const value_type* values, int64_t length,
                       const std::vector<bool>& is_valid);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const value_type* values, int64_t length,
-                const std::vector<bool>& is_valid);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values a std::vector of values
@@ -292,9 +278,6 @@ class ARROW_EXPORT PrimitiveBuilder : public ArrayBuilder {
   /// \return Status
   Status AppendValues(const std::vector<value_type>& values,
                       const std::vector<bool>& is_valid);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<value_type>& values, const 
std::vector<bool>& is_valid);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values a std::vector of values
@@ -364,10 +347,6 @@ class ARROW_EXPORT PrimitiveBuilder : public ArrayBuilder {
     return Status::OK();
   }
 
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<value_type>& values);
-
   Status FinishInternal(std::shared_ptr<ArrayData>* out) override;
   void Reset() override;
 
@@ -391,7 +370,6 @@ class ARROW_EXPORT NumericBuilder : public 
PrimitiveBuilder<T> {
           ARROW_MEMORY_POOL_DEFAULT)
       : PrimitiveBuilder<T1>(TypeTraits<T1>::type_singleton(), pool) {}
 
-  using PrimitiveBuilder<T>::Append;
   using PrimitiveBuilder<T>::AppendValues;
   using PrimitiveBuilder<T>::Resize;
   using PrimitiveBuilder<T>::Reserve;
@@ -461,9 +439,6 @@ class ARROW_EXPORT AdaptiveIntBuilderBase : public 
ArrayBuilder {
     return Status::OK();
   }
 
-  ARROW_DEPRECATED("Use Finish instead")
-  std::shared_ptr<Buffer> data() const { return data_; }
-
   void Reset() override;
   Status Resize(int64_t capacity) override;
 
@@ -561,10 +536,6 @@ class ARROW_EXPORT AdaptiveUIntBuilder : public 
internal::AdaptiveIntBuilderBase
   /// \return Status
   Status AppendValues(const uint64_t* values, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const uint64_t* values, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
 
   Status FinishInternal(std::shared_ptr<ArrayData>* out) override;
 
@@ -628,10 +599,6 @@ class ARROW_EXPORT AdaptiveIntBuilder : public 
internal::AdaptiveIntBuilderBase
   /// \return Status
   Status AppendValues(const int64_t* values, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const int64_t* values, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
 
   Status FinishInternal(std::shared_ptr<ArrayData>* out) override;
 
@@ -675,9 +642,6 @@ class ARROW_EXPORT BooleanBuilder : public ArrayBuilder {
     return Status::OK();
   }
 
-  ARROW_DEPRECATED("Use Finish instead")
-  std::shared_ptr<Buffer> data() const { return data_; }
-
   /// Scalar append
   Status Append(const bool val) {
     RETURN_NOT_OK(Reserve(1));
@@ -701,10 +665,6 @@ class ARROW_EXPORT BooleanBuilder : public ArrayBuilder {
   /// \return Status
   Status AppendValues(const uint8_t* values, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const uint8_t* values, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values a contiguous C array of values
@@ -714,9 +674,6 @@ class ARROW_EXPORT BooleanBuilder : public ArrayBuilder {
   /// \return Status
   Status AppendValues(const uint8_t* values, int64_t length,
                       const std::vector<bool>& is_valid);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const uint8_t* values, int64_t length, const 
std::vector<bool>& is_valid);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values a std::vector of bytes
@@ -725,17 +682,11 @@ class ARROW_EXPORT BooleanBuilder : public ArrayBuilder {
   /// \return Status
   Status AppendValues(const std::vector<uint8_t>& values,
                       const std::vector<bool>& is_valid);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<uint8_t>& values, const std::vector<bool>& 
is_valid);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values a std::vector of bytes
   /// \return Status
   Status AppendValues(const std::vector<uint8_t>& values);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<uint8_t>& values);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values an std::vector<bool> indicating true (1) or false
@@ -743,17 +694,11 @@ class ARROW_EXPORT BooleanBuilder : public ArrayBuilder {
   /// (0). Equal in length to values
   /// \return Status
   Status AppendValues(const std::vector<bool>& values, const 
std::vector<bool>& is_valid);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<bool>& values, const std::vector<bool>& 
is_valid);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values an std::vector<bool> indicating true (1) or false
   /// \return Status
   Status AppendValues(const std::vector<bool>& values);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<bool>& values);
 
   /// \brief Append a sequence of elements in one shot
   /// \param[in] values_begin InputIterator to the beginning of the values
@@ -865,10 +810,6 @@ class ARROW_EXPORT ListBuilder : public ArrayBuilder {
   /// will be considered as a null for that slot
   Status AppendValues(const int32_t* offsets, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const int32_t* offsets, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
 
   /// \brief Start a new variable-length list slot
   ///
@@ -955,10 +896,6 @@ class ARROW_EXPORT StringBuilder : public BinaryBuilder {
   /// \return Status
   Status AppendValues(const std::vector<std::string>& values,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const std::vector<std::string>& values,
-                const uint8_t* valid_bytes = NULLPTR);
 
   /// \brief Append a sequence of nul-terminated strings in one shot.
   ///        If one of the values is NULL, it is processed as a null
@@ -971,10 +908,6 @@ class ARROW_EXPORT StringBuilder : public BinaryBuilder {
   /// \return Status
   Status AppendValues(const char** values, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const char** values, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
 };
 
 // ----------------------------------------------------------------------
@@ -1003,10 +936,6 @@ class ARROW_EXPORT FixedSizeBinaryBuilder : public 
ArrayBuilder {
 
   Status AppendValues(const uint8_t* data, int64_t length,
                       const uint8_t* valid_bytes = NULLPTR);
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(const uint8_t* data, int64_t length,
-                const uint8_t* valid_bytes = NULLPTR);
   Status Append(const std::string& value);
   Status AppendNull();
 
@@ -1069,11 +998,6 @@ class ARROW_EXPORT StructBuilder : public ArrayBuilder {
     UnsafeAppendToBitmap(valid_bytes, length);
     return Status::OK();
   }
-  /// \deprecated Use AppendValues instead.
-  ARROW_DEPRECATED("Use AppendValues instead")
-  Status Append(int64_t length, const uint8_t* valid_bytes) {
-    return AppendValues(length, valid_bytes);
-  }
 
   /// Append an element to the Struct. All child-builders' Append method must
   /// be called independently to maintain data-structure consistency.
diff --git a/cpp/src/plasma/client.h b/cpp/src/plasma/client.h
index a95b992..9d4dbf5 100644
--- a/cpp/src/plasma/client.h
+++ b/cpp/src/plasma/client.h
@@ -34,10 +34,6 @@ using arrow::Status;
 
 namespace plasma {
 
-ARROW_DEPRECATED("PLASMA_DEFAULT_RELEASE_DELAY is deprecated")
-constexpr int64_t kDeprecatedPlasmaDefaultReleaseDelay = 64;
-#define PLASMA_DEFAULT_RELEASE_DELAY 
plasma::kDeprecatedPlasmaDefaultReleaseDelay
-
 /// We keep a queue of unreleased objects cached in the client until we start
 /// sending release requests to the store. This is to avoid frequently mapping
 /// and unmapping objects and evicting data from processor caches.
diff --git a/cpp/src/plasma/test/client_tests.cc 
b/cpp/src/plasma/test/client_tests.cc
index d391cd9..a2725a6 100644
--- a/cpp/src/plasma/test/client_tests.cc
+++ b/cpp/src/plasma/test/client_tests.cc
@@ -487,13 +487,6 @@ TEST_F(TestPlasmaStore, ManyObjectTest) {
   }
 }
 
-#ifndef ARROW_NO_DEPRECATED_API
-TEST_F(TestPlasmaStore, DeprecatedApiTest) {
-  int64_t default_delay = PLASMA_DEFAULT_RELEASE_DELAY;
-  ARROW_CHECK(default_delay == plasma::kPlasmaDefaultReleaseDelay);
-}
-#endif  // ARROW_NO_DEPRECATED_API
-
 #ifdef PLASMA_GPU
 using arrow::gpu::CudaBuffer;
 using arrow::gpu::CudaBufferReader;
diff --git a/python/pyarrow/__init__.py b/python/pyarrow/__init__.py
index 7fb1610..bed5930 100644
--- a/python/pyarrow/__init__.py
+++ b/python/pyarrow/__init__.py
@@ -171,8 +171,6 @@ def _plasma_store_entry_point():
 
 from pyarrow.util import _deprecate_api  # noqa
 
-frombuffer = _deprecate_api('frombuffer', 'py_buffer', py_buffer, '0.9.0')
-
 # ----------------------------------------------------------------------
 # Returning absolute path to the pyarrow include directory (if bundled, e.g. in
 # wheels)
diff --git a/python/pyarrow/feather.py b/python/pyarrow/feather.py
index 26f22da..4a693fe 100644
--- a/python/pyarrow/feather.py
+++ b/python/pyarrow/feather.py
@@ -26,7 +26,6 @@ from pyarrow.compat import pdapi
 from pyarrow.lib import FeatherError  # noqa
 from pyarrow.lib import RecordBatch, Table, concat_tables
 import pyarrow.lib as ext
-from .util import _deprecate_nthreads
 
 
 try:
@@ -68,8 +67,7 @@ class FeatherReader(ext.FeatherReader):
         table = Table.from_arrays(columns, names=names)
         return table
 
-    def read_pandas(self, columns=None, nthreads=None, use_threads=False):
-        use_threads = _deprecate_nthreads(use_threads, nthreads)
+    def read_pandas(self, columns=None, use_threads=True):
         return self.read_table(columns=columns).to_pandas(
             use_threads=use_threads)
 
@@ -145,7 +143,7 @@ class FeatherDataset(object):
                              .format(piece, self.schema,
                                      table.schema))
 
-    def read_pandas(self, columns=None, nthreads=None, use_threads=False):
+    def read_pandas(self, columns=None, use_threads=True):
         """
         Read multiple Feather files as a single pandas DataFrame
 
@@ -153,15 +151,14 @@ class FeatherDataset(object):
         ----------
         columns : List[str]
             Names of columns to read from the file
-        nthreads : int, default 1
-            Number of columns to read in parallel.
+        use_threads : boolean, default True
+            Use multiple threads when converting to pandas
 
         Returns
         -------
         pandas.DataFrame
             Content of the file as a pandas DataFrame (of columns)
         """
-        use_threads = _deprecate_nthreads(use_threads, nthreads)
         return self.read_table(columns=columns).to_pandas(
             use_threads=use_threads)
 
@@ -192,7 +189,7 @@ def write_feather(df, dest):
         raise
 
 
-def read_feather(source, columns=None, nthreads=None, use_threads=False):
+def read_feather(source, columns=None, use_threads=True):
     """
     Read a pandas.DataFrame from Feather format
 
@@ -202,14 +199,13 @@ def read_feather(source, columns=None, nthreads=None, 
use_threads=False):
     columns : sequence, optional
         Only read a specific set of columns. If not provided, all columns are
         read
-    use_threads: bool, default False
+    use_threads: bool, default True
         Whether to parallelize reading using multiple threads
 
     Returns
     -------
     df : pandas.DataFrame
     """
-    use_threads = _deprecate_nthreads(use_threads, nthreads)
     reader = FeatherReader(source)
     return reader.read_pandas(columns=columns, use_threads=use_threads)
 
diff --git a/python/pyarrow/io.pxi b/python/pyarrow/io.pxi
index c8d86cd..5a4d164 100644
--- a/python/pyarrow/io.pxi
+++ b/python/pyarrow/io.pxi
@@ -942,15 +942,6 @@ cdef class BufferOutputStream(NativeFile):
         self.is_writable = True
         self.closed = False
 
-    def get_result(self):
-        """
-        Deprecated as of 0.10.0. Alias for getvalue()
-        """
-        warnings.warn("BufferOutputStream.get_result() has been renamed "
-                      "to getvalue(), will be removed in 0.11.0",
-                      FutureWarning)
-        return self.getvalue()
-
     def getvalue(self):
         """
         Finalize output stream and return result as pyarrow.Buffer.
diff --git a/python/pyarrow/ipc.pxi b/python/pyarrow/ipc.pxi
index e00dca8..c60c21c 100644
--- a/python/pyarrow/ipc.pxi
+++ b/python/pyarrow/ipc.pxi
@@ -84,7 +84,7 @@ cdef class Message:
             check_status(self.message.get()
                          .SerializeTo(stream.wr_file.get(),
                                       &output_length))
-        return stream.get_result()
+        return stream.getvalue()
 
     def __repr__(self):
         metadata_len = self.metadata.size
diff --git a/python/pyarrow/ipc.py b/python/pyarrow/ipc.py
index 989e976..a79cafe 100644
--- a/python/pyarrow/ipc.py
+++ b/python/pyarrow/ipc.py
@@ -24,7 +24,6 @@ from pyarrow.lib import (Message, MessageReader,  # noqa
                          read_tensor, write_tensor,
                          get_record_batch_size, get_tensor_size)
 import pyarrow.lib as lib
-from .util import _deprecate_nthreads
 
 
 class _ReadPandasOption(object):
@@ -166,25 +165,23 @@ def serialize_pandas(df, nthreads=None, 
preserve_index=True):
     writer = pa.RecordBatchStreamWriter(sink, batch.schema)
     writer.write_batch(batch)
     writer.close()
-    return sink.get_result()
+    return sink.getvalue()
 
 
-def deserialize_pandas(buf, nthreads=None, use_threads=False):
+def deserialize_pandas(buf, use_threads=True):
     """Deserialize a buffer protocol compatible object into a pandas DataFrame.
 
     Parameters
     ----------
     buf : buffer
         An object compatible with the buffer protocol
-    use_threads: boolean, default False
+    use_threads: boolean, default True
         Whether to parallelize the conversion using multiple threads
 
     Returns
     -------
     df : pandas.DataFrame
     """
-    use_threads = _deprecate_nthreads(use_threads, nthreads)
-
     buffer_reader = pa.BufferReader(buf)
     reader = pa.RecordBatchStreamReader(buffer_reader)
     table = reader.read_all()
diff --git a/python/pyarrow/table.pxi b/python/pyarrow/table.pxi
index 825b2b8..513da28 100644
--- a/python/pyarrow/table.pxi
+++ b/python/pyarrow/table.pxi
@@ -27,8 +27,6 @@ except ImportError:
 else:
     import pyarrow.pandas_compat as pdcompat
 
-from .util import _deprecate_nthreads
-
 
 cdef class ChunkedArray:
     """
@@ -865,15 +863,19 @@ cdef class RecordBatch:
             entries.append((name, column))
         return OrderedDict(entries)
 
-    def to_pandas(self, nthreads=None, use_threads=False):
+    def to_pandas(self, use_threads=True):
         """
         Convert the arrow::RecordBatch to a pandas DataFrame
 
+        Parameters
+        ----------
+        use_threads : boolean, default True
+            Use multiple threads for conversion
+
         Returns
         -------
         pandas.DataFrame
         """
-        use_threads = _deprecate_nthreads(use_threads, nthreads)
         return Table.from_batches([self]).to_pandas(use_threads=use_threads)
 
     @classmethod
@@ -1289,9 +1291,9 @@ cdef class Table:
 
         return result
 
-    def to_pandas(self, nthreads=None, strings_to_categorical=False,
+    def to_pandas(self, strings_to_categorical=False,
                   memory_pool=None, zero_copy_only=False, categories=None,
-                  integer_object_nulls=False, use_threads=False):
+                  integer_object_nulls=False, use_threads=True):
         """
         Convert the arrow::Table to a pandas DataFrame
 
@@ -1308,7 +1310,7 @@ cdef class Table:
             List of columns that should be returned as pandas.Categorical
         integer_object_nulls : boolean, default False
             Cast integers with nulls to objects
-        use_threads: boolean, default False
+        use_threads: boolean, default True
             Whether to parallelize the conversion using multiple threads
 
         Returns
@@ -1318,8 +1320,6 @@ cdef class Table:
         cdef:
             PandasOptions options
 
-        use_threads = _deprecate_nthreads(use_threads, nthreads)
-
         options = PandasOptions(
             strings_to_categorical=strings_to_categorical,
             zero_copy_only=zero_copy_only,
diff --git a/python/pyarrow/tests/test_convert_pandas.py 
b/python/pyarrow/tests/test_convert_pandas.py
index 1cb58ff..4f65547 100644
--- a/python/pyarrow/tests/test_convert_pandas.py
+++ b/python/pyarrow/tests/test_convert_pandas.py
@@ -57,7 +57,7 @@ def _alltypes_example(size=100):
     })
 
 
-def _check_pandas_roundtrip(df, expected=None, use_threads=False,
+def _check_pandas_roundtrip(df, expected=None, use_threads=True,
                             expected_schema=None,
                             check_dtype=True, schema=None,
                             preserve_index=False,
@@ -1836,6 +1836,12 @@ class TestZeroCopyConversion(object):
 
 
 # This function must be at the top-level for Python 2.7's multiprocessing
+def _non_threaded_conversion():
+    df = _alltypes_example()
+    _check_pandas_roundtrip(df, use_threads=False)
+    _check_pandas_roundtrip(df, use_threads=False, as_batch=True)
+
+
 def _threaded_conversion():
     df = _alltypes_example()
     _check_pandas_roundtrip(df, use_threads=True)
@@ -1881,8 +1887,8 @@ class TestConvertMisc(object):
             arr = np.array([], dtype=dtype)
             _check_array_roundtrip(arr, type=pa_type)
 
-    def test_threaded_conversion(self):
-        _threaded_conversion()
+    def test_non_threaded_conversion(self):
+        _non_threaded_conversion()
 
     def test_threaded_conversion_multiprocess(self):
         # Parallel conversion should work from child processes too (ARROW-2963)
diff --git a/python/pyarrow/util.py b/python/pyarrow/util.py
index 2954b62..b882565 100644
--- a/python/pyarrow/util.py
+++ b/python/pyarrow/util.py
@@ -35,13 +35,3 @@ def _deprecate_api(old_name, new_name, api, next_version):
         warnings.warn(msg, FutureWarning)
         return api(*args)
     return wrapper
-
-
-def _deprecate_nthreads(use_threads, nthreads):
-    if nthreads is not None:
-        warnings.warn("`nthreads` argument is deprecated, "
-                      "pass `use_threads` instead", FutureWarning,
-                      stacklevel=3)
-        if nthreads > 1:
-            use_threads = True
-    return use_threads

Reply via email to