Skip to content

Commit 0564266

Browse files
committed
deps: update minimum supported versions to Python 3.9, Pandas 1.5.0, Numpy 1.20.3
1 parent b6c1428 commit 0564266

12 files changed

+41
-223
lines changed

.github/sync-repo-settings.yaml

-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@ branchProtectionRules:
1111
- 'cla/google'
1212
- 'docs'
1313
- 'lint'
14-
- 'unit (3.8)'
1514
- 'unit (3.9)'
1615
- 'unit (3.10)'
1716
- 'unit (3.11)'

.github/workflows/unittest.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ jobs:
88
runs-on: ubuntu-latest
99
strategy:
1010
matrix:
11-
python: ['3.8', '3.9', '3.10', '3.11', '3.12']
11+
python: ['3.9', '3.10', '3.11', '3.12']
1212
steps:
1313
- name: Checkout
1414
uses: actions/checkout@v4
@@ -100,7 +100,7 @@ jobs:
100100
- name: Setup Python
101101
uses: actions/setup-python@v5
102102
with:
103-
python-version: "3.8"
103+
python-version: "3.9"
104104
- name: Install coverage
105105
run: |
106106
python -m pip install --upgrade setuptools pip wheel

db_dtypes/__init__.py

+10-43
Original file line numberDiff line numberDiff line change
@@ -18,20 +18,17 @@
1818
import datetime
1919
import re
2020
from typing import Optional, Union
21-
import warnings
2221

2322
import numpy
24-
import packaging.version
2523
import pandas
2624
import pandas.api.extensions
2725
from pandas.errors import OutOfBoundsDatetime
2826
import pyarrow
2927
import pyarrow.compute
3028

3129
from db_dtypes import core
30+
from db_dtypes.json import JSONArray, JSONDtype
3231
from db_dtypes.version import __version__
33-
from . import _versions_helpers
34-
3532

3633
date_dtype_name = "dbdate"
3734
time_dtype_name = "dbtime"
@@ -47,15 +44,6 @@
4744
_NP_BOX_DTYPE = "datetime64[us]"
4845

4946

50-
# To use JSONArray and JSONDtype, you'll need Pandas 1.5.0 or later. With the removal
51-
# of Python 3.7 compatibility, the minimum Pandas version will be updated to 1.5.0.
52-
if packaging.version.Version(pandas.__version__) >= packaging.version.Version("1.5.0"):
53-
from db_dtypes.json import JSONArray, JSONDtype
54-
else:
55-
JSONArray = None
56-
JSONDtype = None
57-
58-
5947
@pandas.api.extensions.register_extension_dtype
6048
class TimeDtype(core.BaseDatetimeDtype):
6149
"""
@@ -347,33 +335,12 @@ def __sub__(self, other):
347335
return super().__sub__(other)
348336

349337

350-
sys_major, sys_minor, sys_micro = _versions_helpers.extract_runtime_version()
351-
if sys_major == 3 and sys_minor in (7, 8):
352-
warnings.warn(
353-
"The python-bigquery library will stop supporting Python 3.7 "
354-
"and Python 3.8 in a future major release expected in Q4 2024. "
355-
f"Your Python version is {sys_major}.{sys_minor}.{sys_micro}. We "
356-
"recommend that you update soon to ensure ongoing support. For "
357-
"more details, see: [Google Cloud Client Libraries Supported Python Versions policy](https://cloud.google.com/python/docs/supported-python-versions)",
358-
PendingDeprecationWarning,
359-
)
360-
361-
362-
if not JSONArray or not JSONDtype:
363-
__all__ = [
364-
"__version__",
365-
"DateArray",
366-
"DateDtype",
367-
"TimeArray",
368-
"TimeDtype",
369-
]
370-
else:
371-
__all__ = [
372-
"__version__",
373-
"DateArray",
374-
"DateDtype",
375-
"JSONDtype",
376-
"JSONArray",
377-
"TimeArray",
378-
"TimeDtype",
379-
]
338+
__all__ = [
339+
"__version__",
340+
"DateArray",
341+
"DateDtype",
342+
"JSONDtype",
343+
"JSONArray",
344+
"TimeArray",
345+
"TimeDtype",
346+
]

db_dtypes/_versions_helpers.py

-32
This file was deleted.

db_dtypes/pandas_backports.py

+8-88
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,11 @@
1919
the versions in the later versions of pandas.
2020
"""
2121

22+
import operator
2223
from typing import Any
2324

24-
import numpy
2525
import packaging.version
2626
import pandas
27-
from pandas.api.types import is_integer
2827
import pandas.compat.numpy.function
2928
import pandas.core.nanops
3029

@@ -35,17 +34,15 @@
3534
nanany = pandas.core.nanops.nanany
3635
nanmax = pandas.core.nanops.nanmax
3736
nanmin = pandas.core.nanops.nanmin
37+
nanmedian = pandas.core.nanops.nanmedian
3838
numpy_validate_all = pandas.compat.numpy.function.validate_all
3939
numpy_validate_any = pandas.compat.numpy.function.validate_any
4040
numpy_validate_max = pandas.compat.numpy.function.validate_max
4141
numpy_validate_min = pandas.compat.numpy.function.validate_min
42+
numpy_validate_median = pandas.compat.numpy.function.validate_median
4243

43-
if pandas_release >= (1, 3):
44-
nanmedian = pandas.core.nanops.nanmedian
45-
numpy_validate_median = pandas.compat.numpy.function.validate_median
4644

47-
48-
def import_default(module_name, force=False, default=None):
45+
def import_default(module_name, default=None):
4946
"""
5047
Provide an implementation for a class or function when it can't be imported
5148
@@ -56,10 +53,7 @@ def import_default(module_name, force=False, default=None):
5653
"""
5754

5855
if default is None:
59-
return lambda func_or_class: import_default(module_name, force, func_or_class)
60-
61-
if force:
62-
return default
56+
return lambda func_or_class: import_default(module_name, func_or_class)
6357

6458
name = default.__name__
6559
module = __import__(module_name, {}, {}, [name])
@@ -73,85 +67,11 @@ def import_default(module_name, force=False, default=None):
7367
# 'datetime.date'
7468
@import_default("pandas.core.arraylike")
7569
class OpsMixin:
76-
def _cmp_method(self, other, op): # pragma: NO COVER
77-
return NotImplemented
70+
pass
7871

7972

8073
# TODO: use public API once pandas 1.5 / 2.x is released.
8174
# See: https://github.com/pandas-dev/pandas/pull/45544
82-
@import_default("pandas.core.arrays._mixins", pandas_release < (1, 3))
75+
@import_default("pandas.core.arrays._mixins")
8376
class NDArrayBackedExtensionArray(pandas.core.arrays.base.ExtensionArray):
84-
def __init__(self, values, dtype):
85-
assert isinstance(values, numpy.ndarray)
86-
self._ndarray = values
87-
self._dtype = dtype
88-
89-
@classmethod
90-
def _from_backing_data(cls, data):
91-
return cls(data, data.dtype)
92-
93-
def __getitem__(self, index):
94-
value = self._ndarray[index]
95-
if is_integer(index):
96-
return self._box_func(value)
97-
return self.__class__(value, self._dtype)
98-
99-
def __setitem__(self, index, value):
100-
self._ndarray[index] = self._validate_setitem_value(value)
101-
102-
def __len__(self):
103-
return len(self._ndarray)
104-
105-
@property
106-
def shape(self):
107-
return self._ndarray.shape
108-
109-
@property
110-
def ndim(self) -> int:
111-
return self._ndarray.ndim
112-
113-
@property
114-
def size(self) -> int:
115-
return self._ndarray.size
116-
117-
@property
118-
def nbytes(self) -> int:
119-
return self._ndarray.nbytes
120-
121-
def copy(self):
122-
return self[:]
123-
124-
def repeat(self, n):
125-
return self.__class__(self._ndarray.repeat(n), self._dtype)
126-
127-
def take(
128-
self,
129-
indices,
130-
*,
131-
allow_fill: bool = False,
132-
fill_value: Any = None,
133-
axis: int = 0,
134-
):
135-
from pandas.core.algorithms import take
136-
137-
if allow_fill:
138-
fill_value = self._validate_scalar(fill_value)
139-
140-
new_data = take(
141-
self._ndarray,
142-
indices,
143-
allow_fill=allow_fill,
144-
fill_value=fill_value,
145-
axis=axis,
146-
)
147-
return self._from_backing_data(new_data)
148-
149-
@classmethod
150-
def _concat_same_type(cls, to_concat, axis=0):
151-
dtypes = {str(x.dtype) for x in to_concat}
152-
if len(dtypes) != 1:
153-
raise ValueError("to_concat must have the same dtype (tz)", dtypes)
154-
155-
new_values = [x._ndarray for x in to_concat]
156-
new_values = numpy.concatenate(new_values, axis=axis)
157-
return to_concat[0]._from_backing_data(new_values) # type: ignore[arg-type]
77+
pass

setup.py

+4-6
Original file line numberDiff line numberDiff line change
@@ -30,10 +30,10 @@
3030
release_status = "Development Status :: 5 - Production/Stable"
3131

3232
dependencies = [
33-
"packaging >= 17.0",
34-
"pandas >= 0.24.2",
33+
"packaging>=17.0",
34+
"pandas>=1.5.0",
3535
"pyarrow>=3.0.0",
36-
"numpy >= 1.16.6",
36+
"numpy>=1.20.3",
3737
]
3838

3939
package_root = os.path.abspath(os.path.dirname(__file__))
@@ -63,8 +63,6 @@ def readme():
6363
"License :: OSI Approved :: Apache Software License",
6464
"Programming Language :: Python",
6565
"Programming Language :: Python :: 3",
66-
"Programming Language :: Python :: 3.7",
67-
"Programming Language :: Python :: 3.8",
6866
"Programming Language :: Python :: 3.9",
6967
"Programming Language :: Python :: 3.10",
7068
"Programming Language :: Python :: 3.11",
@@ -74,6 +72,6 @@ def readme():
7472
],
7573
platforms="Posix; MacOS X; Windows",
7674
install_requires=dependencies,
77-
python_requires=">=3.7",
75+
python_requires=">=3.9",
7876
tests_require=["pytest"],
7977
)

testing/constraints-3.7.txt

-11
This file was deleted.

testing/constraints-3.8.txt

-2
This file was deleted.

testing/constraints-3.9.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
# Make sure we test with pandas 1.5.3. The Python version isn't that relevant.
22
pandas==1.5.3
3-
numpy==1.24.0
3+
numpy==1.20.3

tests/unit/test_date.py

-9
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import pytest
2424

2525
import db_dtypes
26-
from db_dtypes import pandas_backports
2726

2827
VALUE_PARSING_TEST_CASES = [
2928
# Min/Max values for pandas.Timestamp.
@@ -258,10 +257,6 @@ def test_date_min_2d():
258257
)
259258

260259

261-
@pytest.mark.skipif(
262-
not hasattr(pandas_backports, "numpy_validate_median"),
263-
reason="median not available with this version of pandas",
264-
)
265260
@pytest.mark.parametrize(
266261
"values, expected",
267262
[
@@ -284,10 +279,6 @@ def test_date_median(values, expected):
284279
assert series.median() == expected
285280

286281

287-
@pytest.mark.skipif(
288-
not hasattr(pandas_backports, "numpy_validate_median"),
289-
reason="median not available with this version of pandas",
290-
)
291282
def test_date_median_2d():
292283
input_array = db_dtypes.DateArray(
293284
numpy.array(

0 commit comments

Comments
 (0)