Repository: arrow
Updated Branches:
  refs/heads/master 4e0aa3cdd -> 5303594bf


ARROW-1395: [C++/Python] Remove APIs deprecated from 0.5.0 onward

Author: Wes McKinney <wes.mckin...@twosigma.com>

Closes #983 from wesm/ARROW-1395 and squashes the following commits:

c105a215 [Wes McKinney] Remove deprecated APIs from <= 0.4.0


Project: http://git-wip-us.apache.org/repos/asf/arrow/repo
Commit: http://git-wip-us.apache.org/repos/asf/arrow/commit/5303594b
Tree: http://git-wip-us.apache.org/repos/asf/arrow/tree/5303594b
Diff: http://git-wip-us.apache.org/repos/asf/arrow/diff/5303594b

Branch: refs/heads/master
Commit: 5303594bf3bfddfee14ca12e45ef32bcabb085c2
Parents: 4e0aa3c
Author: Wes McKinney <wes.mckin...@twosigma.com>
Authored: Mon Aug 21 22:30:20 2017 -0400
Committer: Wes McKinney <wes.mckin...@twosigma.com>
Committed: Mon Aug 21 22:30:20 2017 -0400

----------------------------------------------------------------------
 cpp/src/arrow/io/hdfs.h                   |  2 +-
 cpp/src/arrow/ipc/reader.h                |  7 ----
 cpp/src/arrow/ipc/writer.h                |  7 ----
 python/pyarrow/__init__.py                | 25 ++------------
 python/pyarrow/ipc.py                     |  2 +-
 python/pyarrow/tests/test_deprecations.py | 45 ++------------------------
 6 files changed, 7 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/arrow/blob/5303594b/cpp/src/arrow/io/hdfs.h
----------------------------------------------------------------------
diff --git a/cpp/src/arrow/io/hdfs.h b/cpp/src/arrow/io/hdfs.h
index 1507ca9..ca88953 100644
--- a/cpp/src/arrow/io/hdfs.h
+++ b/cpp/src/arrow/io/hdfs.h
@@ -172,8 +172,8 @@ class ARROW_EXPORT HadoopFileSystem : public FileSystem {
   DISALLOW_COPY_AND_ASSIGN(HadoopFileSystem);
 };
 
-// 0.6.0
 #ifndef ARROW_NO_DEPRECATED_API
+/// \deprecated Since 0.6.0
 using HdfsClient = HadoopFileSystem;
 #endif
 

http://git-wip-us.apache.org/repos/asf/arrow/blob/5303594b/cpp/src/arrow/ipc/reader.h
----------------------------------------------------------------------
diff --git a/cpp/src/arrow/ipc/reader.h b/cpp/src/arrow/ipc/reader.h
index c0d3fb1..22d236a 100644
--- a/cpp/src/arrow/ipc/reader.h
+++ b/cpp/src/arrow/ipc/reader.h
@@ -194,13 +194,6 @@ Status ARROW_EXPORT ReadRecordBatch(const std::shared_ptr<Schema>& schema, int64
 Status ARROW_EXPORT ReadTensor(int64_t offset, io::RandomAccessFile* file,
                                std::shared_ptr<Tensor>* out);
 
-/// Backwards-compatibility for Arrow < 0.4.0
-///
-#ifndef ARROW_NO_DEPRECATED_API
-using StreamReader = RecordBatchReader;
-using FileReader = RecordBatchFileReader;
-#endif
-
 }  // namespace ipc
 }  // namespace arrow
 

http://git-wip-us.apache.org/repos/asf/arrow/blob/5303594b/cpp/src/arrow/ipc/writer.h
----------------------------------------------------------------------
diff --git a/cpp/src/arrow/ipc/writer.h b/cpp/src/arrow/ipc/writer.h
index c28dfe0..2a31d2c 100644
--- a/cpp/src/arrow/ipc/writer.h
+++ b/cpp/src/arrow/ipc/writer.h
@@ -177,13 +177,6 @@ Status ARROW_EXPORT WriteLargeRecordBatch(const RecordBatch& batch,
 Status ARROW_EXPORT WriteTensor(const Tensor& tensor, io::OutputStream* dst,
                                 int32_t* metadata_length, int64_t* body_length);
 
-/// Backwards-compatibility for Arrow < 0.4.0
-///
-#ifndef ARROW_NO_DEPRECATED_API
-using FileWriter = RecordBatchFileWriter;
-using StreamWriter = RecordBatchStreamWriter;
-#endif
-
 }  // namespace ipc
 }  // namespace arrow
 

http://git-wip-us.apache.org/repos/asf/arrow/blob/5303594b/python/pyarrow/__init__.py
----------------------------------------------------------------------
diff --git a/python/pyarrow/__init__.py b/python/pyarrow/__init__.py
index 4abf29e..d00286d 100644
--- a/python/pyarrow/__init__.py
+++ b/python/pyarrow/__init__.py
@@ -114,7 +114,8 @@ localfs = LocalFileSystem.get_instance()
 def _plasma_store_entry_point():
     """Entry point for starting the plasma store.
 
-    This can be used by invoking e. g. ``plasma_store -s /tmp/plasma -m 1000000000``
+    This can be used by invoking e.g.
+    ``plasma_store -s /tmp/plasma -m 1000000000``
     from the command line and will start the plasma_store executable with the
     given arguments.
     """
@@ -127,30 +128,10 @@ def _plasma_store_entry_point():
     process.wait()
 
 # ----------------------------------------------------------------------
-# 0.4.0 deprecations
+# Deprecations
 
 from pyarrow.util import _deprecate_class
 
-FileReader = _deprecate_class('FileReader',
-                              'RecordBatchFileReader',
-                              RecordBatchFileReader, '0.5.0')
-
-FileWriter = _deprecate_class('FileWriter',
-                              'RecordBatchFileWriter',
-                              RecordBatchFileWriter, '0.5.0')
-
-StreamReader = _deprecate_class('StreamReader',
-                                'RecordBatchStreamReader',
-                                RecordBatchStreamReader, '0.5.0')
-
-StreamWriter = _deprecate_class('StreamWriter',
-                                'RecordBatchStreamWriter',
-                                RecordBatchStreamWriter, '0.5.0')
-
-InMemoryOutputStream = _deprecate_class('InMemoryOutputStream',
-                                        'BufferOutputStream',
-                                        BufferOutputStream, '0.5.0')
-
 # Backwards compatibility with pyarrow < 0.6.0
 HdfsClient = _deprecate_class('HdfsClient', 'pyarrow.hdfs.connect',
                               hdfs.connect, '0.6.0')

http://git-wip-us.apache.org/repos/asf/arrow/blob/5303594b/python/pyarrow/ipc.py
----------------------------------------------------------------------
diff --git a/python/pyarrow/ipc.py b/python/pyarrow/ipc.py
index f863128..d527722 100644
--- a/python/pyarrow/ipc.py
+++ b/python/pyarrow/ipc.py
@@ -155,7 +155,7 @@ def serialize_pandas(df):
         An object compatible with the buffer protocol
     """
     batch = pa.RecordBatch.from_pandas(df)
-    sink = pa.InMemoryOutputStream()
+    sink = pa.BufferOutputStream()
     writer = pa.RecordBatchStreamWriter(sink, batch.schema)
     writer.write_batch(batch)
     writer.close()

http://git-wip-us.apache.org/repos/asf/arrow/blob/5303594b/python/pyarrow/tests/test_deprecations.py
----------------------------------------------------------------------
diff --git a/python/pyarrow/tests/test_deprecations.py b/python/pyarrow/tests/test_deprecations.py
index 62b9666..b165289 100644
--- a/python/pyarrow/tests/test_deprecations.py
+++ b/python/pyarrow/tests/test_deprecations.py
@@ -17,48 +17,7 @@
 
 # Check that various deprecation warnings are raised
 
+# flake8: noqa
+
 import pyarrow as pa
 import pytest
-
-
-def test_inmemory_output_stream():
-    with pytest.warns(FutureWarning):
-        stream = pa.InMemoryOutputStream()
-        assert isinstance(stream, pa.BufferOutputStream)
-
-
-def test_file_reader_writer():
-    data = [
-        pa.array([1, 2, 3, 4]),
-        pa.array(['foo', 'bar', 'baz', None]),
-        pa.array([True, None, False, True])
-    ]
-    batch = pa.RecordBatch.from_arrays(data, ['f0', 'f1', 'f2'])
-
-    sink = pa.BufferOutputStream()
-
-    with pytest.warns(FutureWarning):
-        stream_writer = pa.StreamWriter(sink, batch.schema)
-        assert isinstance(stream_writer, pa.RecordBatchStreamWriter)
-
-    sink2 = pa.BufferOutputStream()
-    with pytest.warns(FutureWarning):
-        file_writer = pa.FileWriter(sink2, batch.schema)
-        assert isinstance(file_writer, pa.RecordBatchFileWriter)
-
-    file_writer.write_batch(batch)
-    stream_writer.write_batch(batch)
-
-    file_writer.close()
-    stream_writer.close()
-
-    buf = sink.get_result()
-    buf2 = sink2.get_result()
-
-    with pytest.warns(FutureWarning):
-        stream_reader = pa.StreamReader(buf)
-        assert isinstance(stream_reader, pa.RecordBatchStreamReader)
-
-    with pytest.warns(FutureWarning):
-        file_reader = pa.FileReader(buf2)
-        assert isinstance(file_reader, pa.RecordBatchFileReader)

Reply via email to