
Commit

refactor: fixes for numpy-2.0 deprecation warnings, require numpy>=1.20.3

mwtoews committed Jun 17, 2024

1 parent 5cdd609 commit 3ec1915
Showing 18 changed files with 55 additions and 61 deletions.
2 changes: 1 addition & 1 deletion .docs/Notebooks/zonebudget_example.py
@@ -111,7 +111,7 @@
inyrbud = inyr.get_budget()

names = ["FROM_RECHARGE"]
-rowidx = np.in1d(cmdbud["name"], names)
+rowidx = np.isin(cmdbud["name"], names)
colidx = "ZONE_1"

print(f"{cmdbud[rowidx][colidx][0]:,.1f} cubic meters/day")
2 changes: 1 addition & 1 deletion README.md
@@ -35,7 +35,7 @@ Installation
FloPy requires **Python** 3.8+ with:

```
-numpy >=1.15.0,<2.0.0
+numpy >=1.20.3,<2.0.0
matplotlib >=1.4.0
pandas >=2.0.0
```
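The pin now floors numpy at 1.20.3 and still excludes 2.0. A hedged sketch of checking an installed numpy against that range (it assumes the third-party `packaging` library, which is not a FloPy requirement):

```python
import numpy as np
from packaging.version import Version  # assumed available; not a FloPy dependency

installed = Version(np.__version__)
# mirrors the pin "numpy >=1.20.3,<2.0.0" shown above
assert Version("1.20.3") <= installed < Version("2.0.0"), (
    f"numpy {installed} is outside the supported range"
)
```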
2 changes: 1 addition & 1 deletion autotest/test_mf6.py
@@ -102,7 +102,7 @@ def write_head(
("kper", "i4"),
("pertim", "f8"),
("totim", "f8"),
("text", "a16"),
("text", "S16"),
("ncol", "i4"),
("nrow", "i4"),
("ilay", "i4"),
2 changes: 1 addition & 1 deletion autotest/test_mp6.py
@@ -175,7 +175,7 @@ def test_get_destination_data(function_tmpdir, mp6_test_path):
np.array(well_pthld)[["k", "i", "j"]].tolist(),
dtype=starting_locs.dtype,
)
-assert np.all(np.in1d(starting_locs, pathline_locs))
+assert np.all(np.isin(starting_locs, pathline_locs))

# test writing a shapefile of endpoints
epd.write_shapefile(
14 changes: 7 additions & 7 deletions autotest/test_particledata.py
@@ -83,7 +83,7 @@ def test_particledata_structured_ctor_with_partlocs_as_list_of_tuples():
assert isinstance(data.particledata, pd.DataFrame)
assert np.array_equal(
data.particledata.to_records(index=False),
-np.core.records.fromrecords(
+np.rec.fromrecords(
[
(0, 1, 1, 0.5, 0.5, 0.5, 0.0, 0),
(0, 1, 2, 0.5, 0.5, 0.5, 0.0, 0),
@@ -102,7 +102,7 @@ def test_particledata_structured_ctor_with_partlocs_as_ndarray():
assert isinstance(data.particledata, pd.DataFrame)
assert np.array_equal(
data.particledata.to_records(index=False),
-np.core.records.fromrecords(
+np.rec.fromrecords(
[
(0, 1, 1, 0.5, 0.5, 0.5, 0.0, 0),
(0, 1, 2, 0.5, 0.5, 0.5, 0.0, 0),
@@ -121,7 +121,7 @@ def test_particledata_unstructured_ctor_with_partlocs_as_ndarray():
assert isinstance(data.particledata, pd.DataFrame)
assert np.array_equal(
data.particledata.to_records(index=False),
-np.core.records.fromrecords(
+np.rec.fromrecords(
[
(0, 0.5, 0.5, 0.5, 0.0, 0),
(1, 0.5, 0.5, 0.5, 0.0, 0),
@@ -141,7 +141,7 @@ def test_particledata_unstructured_ctor_with_partlocs_as_list():
assert isinstance(data.particledata, pd.DataFrame)
assert np.array_equal(
data.particledata.to_records(index=False),
-np.core.records.fromrecords(
+np.rec.fromrecords(
[
(0, 0.5, 0.5, 0.5, 0.0, 0),
(1, 0.5, 0.5, 0.5, 0.0, 0),
@@ -161,7 +161,7 @@ def test_particledata_unstructured_ctor_with_partlocs_as_ndarray():
assert isinstance(data.particledata, pd.DataFrame)
assert np.array_equal(
data.particledata.to_records(index=False),
-np.core.records.fromrecords(
+np.rec.fromrecords(
[
(0, 0.5, 0.5, 0.5, 0.0, 0),
(1, 0.5, 0.5, 0.5, 0.0, 0),
@@ -181,7 +181,7 @@ def test_particledata_structured_ctor_with_partlocs_as_list_of_lists():
assert isinstance(data.particledata, pd.DataFrame)
assert np.array_equal(
data.particledata.to_records(index=False),
-np.core.records.fromrecords(
+np.rec.fromrecords(
[
(0, 1, 1, 0.5, 0.5, 0.5, 0.0, 0),
(0, 1, 2, 0.5, 0.5, 0.5, 0.0, 0),
@@ -212,7 +212,7 @@ def test_particledata_to_prp_dis_1():
) # each coord should be a tuple (irpt, k, i, j, x, y, z)

# expected
-exp = np.core.records.fromrecords(
+exp = np.rec.fromrecords(
[
(0, 1, 1, 0.5, 0.5, 0.5, 0.0, 0),
(0, 1, 2, 0.5, 0.5, 0.5, 0.0, 0),
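`np.core.records` is a private namespace under numpy 2.0; `np.rec.fromrecords` is the public equivalent and builds the same recarray from a list of tuples, as these tests now do. A minimal sketch (the field names are illustrative, not necessarily flopy's particle-data dtype):

```python
import numpy as np

# np.rec.fromrecords infers one column per tuple position;
# the field names here are illustrative only
rec = np.rec.fromrecords(
    [
        (0, 1, 1, 0.5, 0.5, 0.5, 0.0, 0),
        (0, 1, 2, 0.5, 0.5, 0.5, 0.0, 0),
    ],
    names=["k", "i", "j", "localx", "localy", "localz", "timeoffset", "drape"],
)
print(rec.localz)  # [0.5 0.5]
```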
4 changes: 2 additions & 2 deletions etc/environment.yml
@@ -6,8 +6,9 @@ dependencies:

# required
- python>=3.8
-- numpy>=1.15.0,<2.0.0
+- numpy>=1.20.3,<2.0.0
- matplotlib>=1.4.0
+- pandas>=2.0.0

# lint
- cffconvert
@@ -33,7 +34,6 @@ dependencies:
# optional
- affine
- scipy
-- pandas
- netcdf4
- pyshp
- rasterio
4 changes: 2 additions & 2 deletions flopy/modflow/mffhb.py
@@ -210,7 +210,7 @@ def __init__(
ds5 = ds5.to_records(index=False)
# convert numpy array to a recarray
if ds5.dtype != dtype:
-ds5 = np.core.records.fromarrays(ds5.transpose(), dtype=dtype)
+ds5 = np.rec.fromarrays(ds5.transpose(), dtype=dtype)

# assign dataset 5
self.ds5 = ds5
@@ -229,7 +229,7 @@ def __init__(
ds7 = ds7.to_records(index=False)
# convert numpy array to a recarray
if ds7.dtype != dtype:
-ds7 = np.core.records.fromarrays(ds7.transpose(), dtype=dtype)
+ds7 = np.rec.fromarrays(ds7.transpose(), dtype=dtype)

# assign dataset 7
self.ds7 = ds7
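`np.rec.fromarrays` is the public spelling of `np.core.records.fromarrays`; it takes one array per field, which is why the plain 2-D arrays above are transposed before conversion. A minimal sketch with an invented dataset-5-style dtype:

```python
import numpy as np

# invented FHB-like dtype, for illustration only
dtype = np.dtype([("lay", "i4"), ("row", "i4"), ("col", "i4"), ("flwrat", "f8")])

# one row per record, one value per field
ds5 = np.array(
    [
        [1, 1, 1, 100.0],
        [1, 2, 3, 250.0],
    ]
)

# fromarrays expects one 1-D array per field, hence the transpose
ds5 = np.rec.fromarrays(ds5.transpose(), dtype=dtype)
print(ds5.flwrat)  # [100. 250.]
```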
2 changes: 1 addition & 1 deletion flopy/modflow/mfgage.py
@@ -131,7 +131,7 @@ def __init__(
# convert gage_data to a recarray, if necessary
if isinstance(gage_data, np.ndarray):
if not gage_data.dtype == dtype:
-gage_data = np.core.records.fromarrays(
+gage_data = np.rec.fromarrays(
gage_data.transpose(), dtype=dtype
)
elif isinstance(gage_data, pd.DataFrame):
6 changes: 2 additions & 4 deletions flopy/modflow/mfsfr2.py
@@ -1372,9 +1372,7 @@ def get_variable_by_stress_period(self, varname):
all_data[inds, per] = self.segment_data[per][varname]
dtype.append((f"{varname}{per}", float))
isvar = all_data.sum(axis=1) != 0
-ra = np.core.records.fromarrays(
-    all_data[isvar].transpose().copy(), dtype=dtype
-)
+ra = np.rec.fromarrays(all_data[isvar].transpose().copy(), dtype=dtype)
segs = self.segment_data[0].nseg[isvar]
isseg = np.array(
[True if s in segs else False for s in self.reach_data.iseg]
@@ -1387,7 +1385,7 @@ def get_variable_by_stress_period(self, varname):
return ra.view(np.recarray)

def repair_outsegs(self):
-isasegment = np.in1d(
+isasegment = np.isin(
self.segment_data[0].outseg, self.segment_data[0].nseg
)
isasegment = isasegment | (self.segment_data[0].outseg < 0)
8 changes: 2 additions & 6 deletions flopy/modflow/mfstr.py
@@ -371,9 +371,7 @@ def __init__(
)
assert d.dtype == self.dtype, e
elif isinstance(d, np.ndarray):
-d = np.core.records.fromarrays(
-    d.transpose(), dtype=self.dtype
-)
+d = np.rec.fromarrays(d.transpose(), dtype=self.dtype)
elif isinstance(d, int):
if model.verbose:
if d < 0:
@@ -404,9 +402,7 @@ def __init__(
)
assert d.dtype == self.dtype2, e
elif isinstance(d, np.ndarray):
-d = np.core.records.fromarrays(
-    d.transpose(), dtype=self.dtype2
-)
+d = np.rec.fromarrays(d.transpose(), dtype=self.dtype2)
elif isinstance(d, int):
if model.verbose:
if d < 0:
6 changes: 3 additions & 3 deletions flopy/plot/plotutil.py
@@ -2732,7 +2732,7 @@ def to_mp7_pathlines(
data = data.to_records(index=False)

# build mp7 format recarray
-ret = np.core.records.fromarrays(
+ret = np.rec.fromarrays(
[
data[seqn_key],
data["iprp"],
@@ -2841,7 +2841,7 @@ def to_mp7_endpoints(
endpts = endpts.to_records(index=False)

# build mp7 format recarray
-ret = np.core.records.fromarrays(
+ret = np.rec.fromarrays(
[
endpts["sequencenumber"],
endpts["iprp"],
@@ -2928,7 +2928,7 @@ def to_prt_pathlines(
data = data.to_records(index=False)

# build prt format recarray
-ret = np.core.records.fromarrays(
+ret = np.rec.fromarrays(
[
data["stressperiod"],
data["timestep"],
12 changes: 6 additions & 6 deletions flopy/utils/binaryfile.py
@@ -299,7 +299,7 @@ def binaryread_struct(file, vartype, shape=(1,), charlen=16):
# find the number of bytes for one value
numbytes = vartype(1).nbytes
# find the number of values
-nval = np.core.fromnumeric.prod(shape)
+nval = np.prod(shape)
fmt = str(nval) + fmt
s = file.read(numbytes * nval)
result = struct.unpack(fmt, s)
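`np.core.fromnumeric.prod` reaches into a private module; the public `np.prod` computes the same product of the shape tuple, i.e. the number of values to read. A tiny sketch:

```python
import numpy as np

vartype = np.float32
shape = (2, 10, 15)            # e.g. (nlay, nrow, ncol) for one record
nval = np.prod(shape)          # public replacement for np.core.fromnumeric.prod
nbytes_to_read = vartype(1).nbytes * nval
print(nval, nbytes_to_read)    # 300 1200
```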
@@ -1138,7 +1138,7 @@ def _set_precision(self, precision="single"):
h1dt = [
("kstp", "i4"),
("kper", "i4"),
("text", "a16"),
("text", "S16"),
("ncol", "i4"),
("nrow", "i4"),
("nlay", "i4"),
@@ -1161,10 +1161,10 @@ def _set_precision(self, precision="single"):
("delt", ffmt),
("pertim", ffmt),
("totim", ffmt),
("modelnam", "a16"),
("paknam", "a16"),
("modelnam2", "a16"),
("paknam2", "a16"),
("modelnam", "S16"),
("paknam", "S16"),
("modelnam2", "S16"),
("paknam2", "S16"),
]
self.header1_dtype = np.dtype(h1dt)
self.header2_dtype0 = np.dtype(h2dt0)
12 changes: 6 additions & 6 deletions flopy/utils/datafile.py
@@ -43,7 +43,7 @@ def __init__(self, filetype=None, precision="single"):
("kper", "i4"),
("pertim", floattype),
("totim", floattype),
("text", "a16"),
("text", "S16"),
("ncol", "i4"),
("nrow", "i4"),
("ilay", "i4"),
@@ -56,7 +56,7 @@ def __init__(self, filetype=None, precision="single"):
("kper", "i4"),
("pertim", floattype),
("totim", floattype),
("text", "a16"),
("text", "S16"),
("ncol", "i4"),
("nrow", "i4"),
("ilay", "i4"),
@@ -69,7 +69,7 @@ def __init__(self, filetype=None, precision="single"):
("kstp", "i4"),
("kper", "i4"),
("totim", floattype),
("text", "a16"),
("text", "S16"),
("ncol", "i4"),
("nrow", "i4"),
("ilay", "i4"),
@@ -82,7 +82,7 @@ def __init__(self, filetype=None, precision="single"):
("kper", "i4"),
("pertim", floattype),
("totim", floattype),
("text", "a16"),
("text", "S16"),
("m1", "i4"),
("m2", "i4"),
("m3", "i4"),
@@ -95,7 +95,7 @@ def __init__(self, filetype=None, precision="single"):
("kper", "i4"),
("pertim", floattype),
("totim", floattype),
("text", "a16"),
("text", "S16"),
("m1", "i4"),
("m2", "i4"),
("m3", "i4"),
@@ -108,7 +108,7 @@ def __init__(self, filetype=None, precision="single"):
("kper", "i4"),
("pertim", floattype),
("totim", floattype),
("text", "a16"),
("text", "S16"),
("m1", "i4"),
("m2", "i4"),
("m3", "i4"),
6 changes: 3 additions & 3 deletions flopy/utils/modpathfile.py
@@ -124,12 +124,12 @@ def intersect(
cells = t

cells = np.array(cells, dtype=raslice.dtype)
-inds = np.in1d(raslice, cells)
+inds = np.isin(raslice, cells)
epdest = self._data[inds].copy().view(np.recarray)

if to_recarray:
# use particle ids to get the rest of the paths
-inds = np.in1d(self._data["particleid"], epdest.particleid)
+inds = np.isin(self._data["particleid"], epdest.particleid)
series = self._data[inds].copy()
series.sort(order=["particleid", "time"])
series = series.view(np.recarray)
@@ -693,7 +693,7 @@ def get_destination_endpoint_data(self, dest_cells, source=False):
dtype = np.dtype(dtype)
dest_cells = np.array(dest_cells, dtype=dtype)

-inds = np.in1d(raslice, dest_cells)
+inds = np.isin(raslice, dest_cells)
return data[inds].copy().view(np.recarray)

def write_shapefile(
4 changes: 2 additions & 2 deletions flopy/utils/recarray_utils.py
@@ -62,7 +62,7 @@ def ra_slice(ra, cols):
--------
>>> import numpy as np
>>> from flopy.utils import ra_slice
->>> a = np.core.records.fromrecords([("a", 1, 1.1), ("b", 2, 2.1)])
+>>> a = np.rec.fromrecords([("a", 1, 1.1), ("b", 2, 2.1)])
>>> ra_slice(a, ['f0', 'f1'])
rec.array([('a', 1), ('b', 2)],
dtype=[('f0', '<U1'), ('f1', '<i4')])
@@ -75,7 +75,7 @@ def recarray(array, dtype):
Convert a list of lists or tuples to a recarray.
.. deprecated:: 3.5
-Use numpy.core.records.fromrecords instead
+Use numpy.rec.fromrecords instead
Parameters
----------
2 changes: 1 addition & 1 deletion flopy/utils/util_list.py
@@ -419,7 +419,7 @@ def __cast_ndarray(self, kper, d):
f"dtype len: {len(self.dtype)}"
)
try:
-self.__data[kper] = np.core.records.fromarrays(
+self.__data[kper] = np.rec.fromarrays(
d.transpose(), dtype=self.dtype
)
except Exception as e: