
Commit

update upstream patch
h-vetinari committed Jun 12, 2024
1 parent 15ba1c9 commit cd06ad3
Showing 7 changed files with 83 additions and 65 deletions.
4 changes: 1 addition & 3 deletions recipe/meta.yaml
@@ -18,10 +18,8 @@ source:
- patches/0002-MINOR-Python-try-harder-to-set-up-s3_server-fixture-.patch
# backport https://github.com/apache/arrow/pull/41768
- patches/0003-MINOR-Python-Increase-timeout-in-TestThreadedCSVTabl.patch
-# fix for https://github.com/apache/arrow/issues/41319
-- patches/0004-fix-skip-for-numpy-2.0-to-include-rc-s.patch
# backport https://github.com/apache/arrow/pull/42099
-- patches/0005-GH-41924-Python-Fix-tests-when-using-NumPy-2.0-on-Wi.patch
+- patches/0004-GH-41924-Python-Fix-tests-when-using-NumPy-2.0-on-Wi.patch
# testing-submodule not part of release tarball
- git_url: https://github.com/apache/arrow-testing.git
git_rev: 25d16511e8d42c2744a1d94d90169e3a36e92631
@@ -1,7 +1,7 @@
From 0ee0f802918c8627ad67c1cfb3c75d492fda8f49 Mon Sep 17 00:00:00 2001
From: "H. Vetinari" <[email protected]>
Date: Wed, 13 Sep 2023 21:34:29 +1100
-Subject: [PATCH 1/5] fixture teardown should not fail test
+Subject: [PATCH 1/4] fixture teardown should not fail test

---
python/pyarrow/tests/test_fs.py | 10 ++++++++--
2 changes: 1 addition & 1 deletion recipe/patches/0002-MINOR-Python-try-harder-to-set-up-s3_server-fixture-.patch
@@ -1,7 +1,7 @@
From 4c694d19a32bda2d1745c643661c491bb746cc9a Mon Sep 17 00:00:00 2001
From: h-vetinari <[email protected]>
Date: Fri, 24 May 2024 02:18:29 +1100
-Subject: [PATCH 2/5] MINOR: [Python] try harder to set up s3_server fixture
+Subject: [PATCH 2/4] MINOR: [Python] try harder to set up s3_server fixture
(#41754)

In conda-forge, when running the aarch tests in emulation, we regularly run into the [issue](https://github.com/conda-forge/pyarrow-feedstock/issues/117) that the fixture setup fails. Extending the timeouts fixes this. Since it only happens once per session, it doesn't hurt to take a little bit more time.
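The rest of this patch is truncated here, but the gist is simply to give the fixture setup more time before giving up. As a minimal, hypothetical sketch of that idea (the helper name, probe, and timings below are illustrative, not the actual fixture code in pyarrow's test suite):

import time

def wait_until_ready(probe, timeout=60.0, interval=1.0):
    """Poll `probe()` until it returns True or `timeout` seconds elapse."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if probe():
            return True
        time.sleep(interval)
    return False

# Toy stand-in for "is the S3 test server accepting connections yet?".
# On slow, emulated runners the generous deadline is the whole point.
start = time.monotonic()
print(wait_until_ready(lambda: time.monotonic() - start > 2, timeout=30.0))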
2 changes: 1 addition & 1 deletion recipe/patches/0003-MINOR-Python-Increase-timeout-in-TestThreadedCSVTabl.patch
@@ -1,7 +1,7 @@
From a013f41d4809b7a6c8ef887eab9159fc32b35571 Mon Sep 17 00:00:00 2001
From: h-vetinari <[email protected]>
Date: Fri, 24 May 2024 02:11:18 +1100
-Subject: [PATCH 3/5] MINOR: [Python] Increase timeout in
+Subject: [PATCH 3/4] MINOR: [Python] Increase timeout in
TestThreadedCSVTableRead::test_cancellation (#41768)

We hit this in conda-forge on some runners:
79 changes: 79 additions & 0 deletions recipe/patches/0004-GH-41924-Python-Fix-tests-when-using-NumPy-2.0-on-Wi.patch
@@ -0,0 +1,79 @@
From c7e74e697a66537c2a88b5ed14c7fe3bc48b2e6a Mon Sep 17 00:00:00 2001
From: Joris Van den Bossche <[email protected]>
Date: Tue, 11 Jun 2024 12:09:08 +0200
Subject: [PATCH 4/4] GH-41924: [Python] Fix tests when using NumPy 2.0 on
Windows

---
python/pyarrow/tests/parquet/common.py | 2 +-
python/pyarrow/tests/test_array.py | 2 +-
python/pyarrow/tests/test_pandas.py | 2 +-
python/pyarrow/tests/test_table.py | 2 +-
python/scripts/test_leak.py | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/python/pyarrow/tests/parquet/common.py b/python/pyarrow/tests/parquet/common.py
index 8365ed5b28..c3094ee20b 100644
--- a/python/pyarrow/tests/parquet/common.py
+++ b/python/pyarrow/tests/parquet/common.py
@@ -83,7 +83,7 @@ def _random_integers(size, dtype):
iinfo = np.iinfo(dtype)
return np.random.randint(max(iinfo.min, platform_int_info.min),
min(iinfo.max, platform_int_info.max),
- size=size).astype(dtype)
+ size=size, dtype=dtype)


def _range_integers(size, dtype):
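Background for this and the similar randint changes below (not part of the patch): generating with the default integer type and casting afterwards ties the call to the platform default, whose width changed on Windows with NumPy 2.0, while requesting the target dtype up front yields values of the right type directly. A small standalone illustration:

import numpy as np

# Ask randint for the target dtype directly instead of casting afterwards;
# this avoids an intermediate array in the platform default integer type,
# whose width differs across OS / NumPy versions.
vals = np.random.randint(0, np.iinfo(np.uint32).max, size=5, dtype=np.uint32)
print(vals.dtype, vals)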
diff --git a/python/pyarrow/tests/test_array.py b/python/pyarrow/tests/test_array.py
index 156d58326b..dd1d484cc7 100644
--- a/python/pyarrow/tests/test_array.py
+++ b/python/pyarrow/tests/test_array.py
@@ -3323,7 +3323,7 @@ def test_numpy_array_protocol():
result = np.asarray(arr)
np.testing.assert_array_equal(result, expected)

- if Version(np.__version__) < Version("2.0"):
+ if Version(np.__version__) < Version("2.0.0.dev0"):
# copy keyword is not strict and not passed down to __array__
result = np.array(arr, copy=False)
np.testing.assert_array_equal(result, expected)
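The "2.0.0.dev0" sentinel matters for pre-releases: under PEP 440 ordering, a 2.0 release candidate sorts below the plain "2.0" release, so the old check sent NumPy 2.0 rc builds down the pre-2.0 branch (the local patch 0004-fix-skip-for-numpy-2.0-to-include-rc-s.patch removed in this commit presumably worked around the same wrinkle). Comparing against the lowest possible 2.0 identifier fixes that; a quick standalone check, using packaging.version here in place of the Version helper the test imports:

from packaging.version import Version

# A 2.0 release candidate sorts below the final 2.0 release ...
print(Version("2.0.0rc1") < Version("2.0"))         # True  -> rc taken for pre-2.0 NumPy
# ... but not below the earliest possible 2.0 dev version.
print(Version("2.0.0rc1") < Version("2.0.0.dev0"))  # False -> rc treated as NumPy 2.0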
diff --git a/python/pyarrow/tests/test_pandas.py b/python/pyarrow/tests/test_pandas.py
index 3678b4e57a..5071058848 100644
--- a/python/pyarrow/tests/test_pandas.py
+++ b/python/pyarrow/tests/test_pandas.py
@@ -769,7 +769,7 @@ class TestConvertPrimitiveTypes:
info = np.iinfo(dtype)
values = np.random.randint(max(info.min, np.iinfo(np.int_).min),
min(info.max, np.iinfo(np.int_).max),
- size=num_values)
+ size=num_values, dtype=dtype)
data[dtype] = values.astype(dtype)
fields.append(pa.field(dtype, arrow_dtype))

diff --git a/python/pyarrow/tests/test_table.py b/python/pyarrow/tests/test_table.py
index e11efa64db..c44414d0bc 100644
--- a/python/pyarrow/tests/test_table.py
+++ b/python/pyarrow/tests/test_table.py
@@ -3244,7 +3244,7 @@ def test_numpy_array_protocol(constructor):
table = constructor([[1, 2, 3], [4.0, 5.0, 6.0]], names=["a", "b"])
expected = np.array([[1, 4], [2, 5], [3, 6]], dtype="float64")

- if Version(np.__version__) < Version("2.0"):
+ if Version(np.__version__) < Version("2.0.0.dev0"):
# copy keyword is not strict and not passed down to __array__
result = np.array(table, copy=False)
np.testing.assert_array_equal(result, expected)
diff --git a/python/scripts/test_leak.py b/python/scripts/test_leak.py
index f2bbe8d051..86a87f5e74 100644
--- a/python/scripts/test_leak.py
+++ b/python/scripts/test_leak.py
@@ -98,7 +98,7 @@ def test_leak3():


def test_ARROW_8801():
- x = pd.to_datetime(np.random.randint(0, 2**32, size=2**20),
+ x = pd.to_datetime(np.random.randint(0, 2**32, size=2**20, dtype=np.int64),
unit='ms', utc=True)
table = pa.table(pd.DataFrame({'x': x}))

36 changes: 0 additions & 36 deletions recipe/patches/0004-fix-skip-for-numpy-2.0-to-include-rc-s.patch

This file was deleted.

recipe/patches/0005-GH-41924-Python-Fix-tests-when-using-NumPy-2.0-on-Wi.patch

This file was deleted.
