Skip to content

Commit

Permalink
Closes #3616, #2321: Resolve deprecation warnings in make test-proto (#3621)
Browse files Browse the repository at this point in the history

This PR (closes #3616 and closes #2321) resolves all the deprecation warnings that were being raised by `make test-proto`

Co-authored-by: Tess Hayes <stress-tess@users.noreply.github.com>
  • Loading branch information
stress-tess and stress-tess committed Aug 7, 2024
1 parent 4154602 commit b9227de
Show file tree
Hide file tree
Showing 5 changed files with 35 additions and 30 deletions.
32 changes: 16 additions & 16 deletions PROTO_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,35 +2,39 @@
import os
import importlib

import arkouda as ak

from server_util.test.server_test_util import (
TestRunningMode,
is_multilocale_arkouda, # TODO probably not needed
start_arkouda_server,
stop_arkouda_server,
)

os.environ["ARKOUDA_CLIENT_MODE"] = "API"


def pytest_addoption(parser):
    """Register the custom command-line options used by the PROTO test suite.

    Parameters
    ----------
    parser : pytest Parser
        The pytest option parser passed in by the `pytest_addoption` hook.

    Options added
    -------------
    --optional-parquet : flag; run the optional parquet tests when present.
    --nl               : number of locales to start Arkouda with (default "2").
    --size             : problem size (array length) for tests/benchmarks
                         (default "10**8"); may be scaled by locale count.
    --seed             : seed for the random number generator ("" = unseeded).
    """
    parser.addoption(
        "--optional-parquet", action="store_true", default=False, help="run optional parquet tests"
    )
    parser.addoption(
        "--nl",
        action="store",
        default="2",
        help="Number of Locales to run Arkouda with. "
        "Defaults to 2. If Arkouda is not configured for multi_locale, 1 locale is used",
    )
    parser.addoption(
        "--size",
        action="store",
        default="10**8",
        help="Problem size: length of array to use for tests/benchmarks. For some cases, this will "
        "be multiplied by the number of locales."
        "be multiplied by the number of locales.",
    )
    parser.addoption(
        "--seed", action="store", default="", help="Value to initialize random number generator."
    )


def pytest_collection_modifyitems(config, items):
if config.getoption("--optional-parquet"):
# --optional-parquet given in cli: do not skip optional parquet tests
Expand Down Expand Up @@ -80,11 +84,7 @@ def startup_teardown():
+ "environment and/or arkouda_server installation"
)
else:
print(
"in client stack test mode with host: {} port: {}".format(
pytest.server, pytest.port
)
)
print("in client stack test mode with host: {} port: {}".format(pytest.server, pytest.port))

yield

Expand All @@ -97,10 +97,10 @@ def startup_teardown():

@pytest.fixture(scope="class", autouse=True)
def manage_connection():
    """Per-test-class fixture: connect the arkouda client to the running server.

    Uses the server/port/timeout values stashed on the `pytest` module by the
    session-level setup.  Wraps any connection failure in a ConnectionError so
    the test class fails with a clear, uniform error.
    """
    # Imported lazily so the client is only pulled in once a test class runs.
    import arkouda as ak

    try:
        # NOTE(review): the rendered diff duplicated this call (old multi-line
        # form plus new one-line form); exactly one connect is intended.
        ak.connect(server=pytest.server, port=pytest.port, timeout=pytest.timeout)
    except Exception as e:
        raise ConnectionError(e)

Expand Down
8 changes: 4 additions & 4 deletions PROTO_tests/tests/datetime_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,8 +300,8 @@ def test_date_time_accessors(self):
ak.Datetime(ak.date_range("2000-01-01 12:00:00", periods=100, freq="d")),
)
self.date_time_attribute_helper(
pd.Series(pd.date_range("1980-01-01 12:00:00", periods=100, freq="y")).dt,
ak.Datetime(ak.date_range("1980-01-01 12:00:00", periods=100, freq="y")),
pd.Series(pd.date_range("1980-01-01 12:00:00", periods=100, freq="YE")).dt,
ak.Datetime(ak.date_range("1980-01-01 12:00:00", periods=100, freq="YE")),
)

def time_delta_attribute_helper(self, pd_td, ak_td):
Expand All @@ -328,9 +328,9 @@ def test_woy_boundary(self):
# make sure weeks at year boundaries are correct, modified version of pandas test at
# https://github.com/pandas-dev/pandas/blob/main/pandas/tests/scalar/timestamp/test_timestamp.py
for date in "2013-12-31", "2008-12-28", "2009-12-31", "2010-01-01", "2010-01-03":
ak_week = ak.Datetime(ak.date_range(date, periods=10, freq="w")).week.to_list()
ak_week = ak.Datetime(ak.date_range(date, periods=10, freq="W")).week.to_list()
pd_week = (
pd.Series(pd.date_range(date, periods=10, freq="w")).dt.isocalendar().week.to_list()
pd.Series(pd.date_range(date, periods=10, freq="W")).dt.isocalendar().week.to_list()
)
assert ak_week == pd_week

Expand Down
16 changes: 9 additions & 7 deletions PROTO_tests/tests/io_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
from pandas.testing import assert_series_equal

import arkouda as ak
import arkouda.array_api as Array
from arkouda import io_util, read_zarr, to_zarr

NUMERIC_TYPES = ["int64", "float64", "bool", "uint64"]
Expand Down Expand Up @@ -621,8 +620,8 @@ def test_multi_col_write(self, par_test_base_tmp, comp):
pd.testing.assert_frame_equal(akdf.to_pandas(), rd_df.to_pandas())

# test save with index true
akdf.to_parquet(f"{tmp_dirname}/multi_col_parquet", index=True, compression=comp)
rd_data = ak.read_parquet(f"{tmp_dirname}/multi_col_parquet*")
akdf.to_parquet(f"{tmp_dirname}/idx_multi_col_parquet", index=True, compression=comp)
rd_data = ak.read_parquet(f"{tmp_dirname}/idx_multi_col_parquet*")
rd_df = ak.DataFrame(rd_data)
pd.testing.assert_frame_equal(akdf.to_pandas(), rd_df.to_pandas())

Expand Down Expand Up @@ -2378,13 +2377,13 @@ def test_import_hdf(self, import_export_base_tmp):
with tempfile.TemporaryDirectory(dir=import_export_base_tmp) as tmp_dirname:
file_name = f"{tmp_dirname}/import_hdf_test"

self.pddf.to_hdf(f"{file_name}_table.h5", "dataframe", format="Table", mode="w")
self.pddf.to_hdf(f"{file_name}_table.h5", key="dataframe", format="table", mode="w")
akdf = ak.import_data(f"{file_name}_table.h5", write_file=f"{file_name}_ak_table.h5")
assert len(glob.glob(f"{file_name}_ak_table*.h5")) == locales
assert self.pddf.equals(akdf.to_pandas())

self.pddf.to_hdf(
f"{file_name}_table_cols.h5", "dataframe", format="Table", data_columns=True, mode="w"
f"{file_name}_table_cols.h5", key="dataframe", format="table", data_columns=True, mode="w"
)
akdf = ak.import_data(
f"{file_name}_table_cols.h5", write_file=f"{file_name}_ak_table_cols.h5"
Expand All @@ -2393,7 +2392,7 @@ def test_import_hdf(self, import_export_base_tmp):
assert self.pddf.equals(akdf.to_pandas())

self.pddf.to_hdf(
f"{file_name}_fixed.h5", "dataframe", format="fixed", data_columns=True, mode="w"
f"{file_name}_fixed.h5", key="dataframe", format="fixed", data_columns=True, mode="w"
)
akdf = ak.import_data(f"{file_name}_fixed.h5", write_file=f"{file_name}_ak_fixed.h5")
assert len(glob.glob(f"{file_name}_ak_fixed*.h5")) == locales
Expand Down Expand Up @@ -2452,8 +2451,11 @@ def test_export_parquet(self, import_export_base_tmp):


class TestZarr:

@pytest.mark.skip
def test_zarr_read_write(self, zarr_test_base_tmp):
pytest.skip()
import arkouda.array_api as Array

shapes = [(10,), (20,)]
chunk_shapes = [(2,), (3,)]
dtypes = [ak.int64, ak.float64]
Expand Down
4 changes: 2 additions & 2 deletions arkouda/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
from enum import Enum
from typing import Dict, List, Mapping, Optional, Tuple, Union, cast

import pyfiglet # type: ignore

from arkouda import __version__, io_util, security
from arkouda.logger import LogLevel, getArkoudaLogger
from arkouda.message import (
Expand Down Expand Up @@ -200,6 +198,8 @@ def get_shell_mode():

# Print splash message if in UI mode
if mode == ClientMode.UI:
import pyfiglet # type: ignore

print("{}".format(pyfiglet.figlet_format("Arkouda")))
print(f"Client Version: {__version__}") # type: ignore

Expand Down
5 changes: 4 additions & 1 deletion arkouda/pdarrayclass.py
Original file line number Diff line number Diff line change
Expand Up @@ -798,7 +798,10 @@ def opeq(self, other, op):
# pdarray binop scalar
# opeq requires scalar to be cast as pdarray dtype
try:
other = self.dtype.type(other)
if self.dtype != bigint:
other = np.array([other]).astype(self.dtype)[0]
else:
other = self.dtype.type(other)
except Exception:
# Can't cast other as dtype of pdarray
raise TypeError(f"Unhandled scalar type: {other} ({type(other)})")
Expand Down

0 comments on commit b9227de

Please sign in to comment.