[ https://issues.apache.org/jira/browse/ARROW-2391?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16431128#comment-16431128 ]

ASF GitHub Bot commented on ARROW-2391:
---------------------------------------

pitrou closed pull request #1859: ARROW-2391: [C++/Python] Segmentation fault 
from PyArrow when mapping Pandas datetime column to pyarrow.date64
URL: https://github.com/apache/arrow/pull/1859
 
 
   

This is a PR merged from a forked repository. As GitHub hides the original
diff on merge, it is reproduced below for the sake of provenance:

diff --git a/cpp/src/arrow/compute/kernels/cast.cc b/cpp/src/arrow/compute/kernels/cast.cc
index eaebd7cef..bfd519d18 100644
--- a/cpp/src/arrow/compute/kernels/cast.cc
+++ b/cpp/src/arrow/compute/kernels/cast.cc
@@ -396,21 +396,34 @@ struct CastFunctor<Date64Type, TimestampType> {
     ShiftTime<int64_t, int64_t>(ctx, options, conversion.first, conversion.second, input,
                                 output);
 
-    internal::BitmapReader bit_reader(input.buffers[0]->data(), input.offset,
-                                      input.length);
-
     // Ensure that intraday milliseconds have been zeroed out
     auto out_data = GetMutableValues<int64_t>(output, 1);
-    for (int64_t i = 0; i < input.length; ++i) {
-      const int64_t remainder = out_data[i] % kMillisecondsInDay;
-      if (ARROW_PREDICT_FALSE(!options.allow_time_truncate && bit_reader.IsSet() &&
-                              remainder > 0)) {
-        ctx->SetStatus(
-            Status::Invalid("Timestamp value had non-zero intraday milliseconds"));
-        break;
+
+    if (input.null_count != 0) {
+      internal::BitmapReader bit_reader(input.buffers[0]->data(), input.offset,
+                                        input.length);
+
+      for (int64_t i = 0; i < input.length; ++i) {
+        const int64_t remainder = out_data[i] % kMillisecondsInDay;
+        if (ARROW_PREDICT_FALSE(!options.allow_time_truncate && bit_reader.IsSet() &&
+                                remainder > 0)) {
+          ctx->SetStatus(
+              Status::Invalid("Timestamp value had non-zero intraday milliseconds"));
+          break;
+        }
+        out_data[i] -= remainder;
+        bit_reader.Next();
+      }
+    } else {
+      for (int64_t i = 0; i < input.length; ++i) {
+        const int64_t remainder = out_data[i] % kMillisecondsInDay;
+        if (ARROW_PREDICT_FALSE(!options.allow_time_truncate && remainder > 0)) {
+          ctx->SetStatus(
+              Status::Invalid("Timestamp value had non-zero intraday milliseconds"));
+          break;
+        }
+        out_data[i] -= remainder;
       }
-      out_data[i] -= remainder;
-      bit_reader.Next();
     }
   }
 };
diff --git a/python/pyarrow/tests/test_convert_pandas.py b/python/pyarrow/tests/test_convert_pandas.py
index c6e2b75be..de6120176 100644
--- a/python/pyarrow/tests/test_convert_pandas.py
+++ b/python/pyarrow/tests/test_convert_pandas.py
@@ -807,6 +807,44 @@ def test_datetime64_to_date32(self):
 
         assert arr2.equals(arr.cast('date32'))
 
+    @pytest.mark.parametrize('mask', [
+        None,
+        np.ones(3),
+        np.array([True, False, False]),
+    ])
+    def test_pandas_datetime_to_date64(self, mask):
+        s = pd.to_datetime([
+            '2018-05-10T00:00:00',
+            '2018-05-11T00:00:00',
+            '2018-05-12T00:00:00',
+        ])
+        arr = pa.Array.from_pandas(s, type=pa.date64(), mask=mask)
+
+        data = np.array([
+            date(2018, 5, 10),
+            date(2018, 5, 11),
+            date(2018, 5, 12)
+        ])
+        expected = pa.array(data, mask=mask, type=pa.date64())
+
+        assert arr.equals(expected)
+
+    @pytest.mark.parametrize('mask', [
+        None,
+        np.ones(3),
+        np.array([True, False, False])
+    ])
+    def test_pandas_datetime_to_date64_failures(self, mask):
+        s = pd.to_datetime([
+            '2018-05-10T10:24:01',
+            '2018-05-11T10:24:01',
+            '2018-05-12T10:24:01',
+        ])
+
+        expected_msg = 'Timestamp value had non-zero intraday milliseconds'
+        with pytest.raises(pa.ArrowInvalid, msg=expected_msg):
+            pa.Array.from_pandas(s, type=pa.date64(), mask=mask)
+
     def test_date_infer(self):
         df = pd.DataFrame({
             'date': [date(2000, 1, 1),
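
For context on the crash: the removed lines in the C++ hunk unconditionally built an `internal::BitmapReader` over `input.buffers[0]`, but an input with no nulls may carry no validity bitmap at all, so that dereference segfaulted. The patched kernel only touches the bitmap when `input.null_count != 0`. A minimal Python sketch of the two code paths, mirroring the new tests above (assuming a pyarrow build that includes this patch, i.e. 0.10.0 per the Fix Version):

{code:python}
import numpy as np
import pandas as pd
import pyarrow as pa

s = pd.to_datetime(['2018-05-10T00:00:00', '2018-05-11T00:00:00'])

# No mask and no nulls: the input may have no validity bitmap at all,
# which was the crashing path before this patch.
no_nulls = pa.Array.from_pandas(s, type=pa.date64())

# With a mask: a validity bitmap is present and is consulted per value.
masked = pa.Array.from_pandas(s, type=pa.date64(),
                              mask=np.array([True, False]))

print(no_nulls.null_count, masked.null_count)  # 0 and 1
{code}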


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


> [Python] Segmentation fault from PyArrow when mapping Pandas datetime column 
> to pyarrow.date64
> ----------------------------------------------------------------------------------------------
>
>                 Key: ARROW-2391
>                 URL: https://issues.apache.org/jira/browse/ARROW-2391
>             Project: Apache Arrow
>          Issue Type: Bug
>          Components: Python
>    Affects Versions: 0.9.0
>         Environment: Mac OS High Sierra
> Python 3.6
>            Reporter: Dave Challis
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 0.10.0
>
>
> When calling `pyarrow.Table.from_pandas` with a `pandas.DataFrame` and a 
> `pyarrow.Schema` provided, the call results in a segmentation fault if a 
> Pandas `datetime64[ns]` column is converted to a `pyarrow.date64` type.
> A minimal example which shows this is:
> {code:python}
> import pandas as pd
> import pyarrow as pa
> df = pd.DataFrame({'created': ['2018-05-10T10:24:01']})
> df['created'] = pd.to_datetime(df['created'])
> schema = pa.schema([pa.field('created', pa.date64())])
> pa.Table.from_pandas(df, schema=schema)
> {code}
> Executing the above causes the Python interpreter to exit with "Segmentation 
> fault: 11".
> Attempting to convert to various other data types (by specifying different 
> schemas) either succeeds or raises an exception if the conversion is invalid.
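
With the fix above, this scenario no longer crashes: midnight-only timestamps convert to `date64`, while timestamps with an intraday component raise `pa.ArrowInvalid`, as exercised by the new tests. A small sketch of the expected behavior (assuming a pyarrow build with this patch, 0.10.0 or later):

{code:python}
import pandas as pd
import pyarrow as pa

# An intraday timestamp can no longer be silently mapped to date64; the
# cast now fails cleanly instead of crashing the interpreter.
s = pd.to_datetime(['2018-05-10T10:24:01'])
try:
    pa.Array.from_pandas(s, type=pa.date64())
except pa.ArrowInvalid as exc:
    print(exc)  # Timestamp value had non-zero intraday milliseconds
{code}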



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
