This is an automated email from the ASF dual-hosted git repository.

apitrou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/main by this push:
     new 6f05288e2f GH-36449: [C++][CI] Don't use -g1 for Python jobs (#36453)
6f05288e2f is described below

commit 6f05288e2f0e39b71b51a7c4146916ad71e8ff4a
Author: Sutou Kouhei <[email protected]>
AuthorDate: Tue Jul 4 23:01:36 2023 +0900

    GH-36449: [C++][CI] Don't use -g1 for Python jobs (#36453)
    
    ### Rationale for this change
    
    Python jobs have tests for the GDB plugin. We can't use -g1 for the GDB plugin 
because -g1 doesn't provide enough debug information.
    
    ### What changes are included in this PR?
    
    Don't use `-g1` for Python jobs.
    
    ### Are these changes tested?
    
    Yes.
    
    ### Are there any user-facing changes?
    
    No.
    * Closes: #36449
    * Closes: #36455
    
    Lead-authored-by: Sutou Kouhei <[email protected]>
    Co-authored-by: Antoine Pitrou <[email protected]>
    Signed-off-by: Antoine Pitrou <[email protected]>
---
 ci/docker/linux-apt-python-3.dockerfile |  1 +
 ci/docker/linux-dnf-python-3.dockerfile |  1 +
 python/pyarrow/tests/test_cuda.py       | 22 ++++++++++++----------
 3 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/ci/docker/linux-apt-python-3.dockerfile 
b/ci/docker/linux-apt-python-3.dockerfile
index d4e4dec36a..829c32e96b 100644
--- a/ci/docker/linux-apt-python-3.dockerfile
+++ b/ci/docker/linux-apt-python-3.dockerfile
@@ -53,6 +53,7 @@ ENV ARROW_ACERO=ON \
     ARROW_CSV=ON \
     ARROW_DATASET=ON \
     ARROW_FILESYSTEM=ON \
+    ARROW_GDB=ON \
     ARROW_HDFS=ON \
     ARROW_JSON=ON \
     ARROW_USE_GLOG=OFF
diff --git a/ci/docker/linux-dnf-python-3.dockerfile 
b/ci/docker/linux-dnf-python-3.dockerfile
index d1035255d3..98f9f64885 100644
--- a/ci/docker/linux-dnf-python-3.dockerfile
+++ b/ci/docker/linux-dnf-python-3.dockerfile
@@ -44,6 +44,7 @@ ENV ARROW_ACERO=ON \
     ARROW_CSV=ON \
     ARROW_DATASET=ON \
     ARROW_FILESYSTEM=ON \
+    ARROW_GDB=ON \
     ARROW_HDFS=ON \
     ARROW_JSON=ON \
     ARROW_USE_GLOG=OFF
diff --git a/python/pyarrow/tests/test_cuda.py 
b/python/pyarrow/tests/test_cuda.py
index 2ba2f82673..43cd16a3cf 100644
--- a/python/pyarrow/tests/test_cuda.py
+++ b/python/pyarrow/tests/test_cuda.py
@@ -349,27 +349,29 @@ def test_HostBuffer(size):
 
 @pytest.mark.parametrize("size", [0, 1, 1000])
 def test_copy_from_to_host(size):
-
     # Create a buffer in host containing range(size)
-    buf = pa.allocate_buffer(size, resizable=True)  # in host
+    dt = np.dtype('uint16')
+    nbytes = size * dt.itemsize
+    buf = pa.allocate_buffer(nbytes, resizable=True)  # in host
     assert isinstance(buf, pa.Buffer)
     assert not isinstance(buf, cuda.CudaBuffer)
-    arr = np.frombuffer(buf, dtype=np.uint8)
+    arr = np.frombuffer(buf, dtype=dt)
     assert arr.size == size
     arr[:] = range(size)
-    arr_ = np.frombuffer(buf, dtype=np.uint8)
+    arr_ = np.frombuffer(buf, dtype=dt)
     np.testing.assert_equal(arr, arr_)
 
-    device_buffer = global_context.new_buffer(size)
+    # Create a device buffer of the same size and copy from host
+    device_buffer = global_context.new_buffer(nbytes)
     assert isinstance(device_buffer, cuda.CudaBuffer)
     assert isinstance(device_buffer, pa.Buffer)
-    assert device_buffer.size == size
+    assert device_buffer.size == nbytes
     assert not device_buffer.is_cpu
+    device_buffer.copy_from_host(buf, position=0, nbytes=nbytes)
 
-    device_buffer.copy_from_host(buf, position=0, nbytes=size)
-
-    buf2 = device_buffer.copy_to_host(position=0, nbytes=size)
-    arr2 = np.frombuffer(buf2, dtype=np.uint8)
+    # Copy back to host and compare contents
+    buf2 = device_buffer.copy_to_host(position=0, nbytes=nbytes)
+    arr2 = np.frombuffer(buf2, dtype=dt)
     np.testing.assert_equal(arr, arr2)
 
 

Reply via email to