Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add windows CI #151

Merged
merged 33 commits into from
Nov 5, 2022
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
f9c3b0c
Add windows CI
Illviljan Sep 23, 2022
303149e
Update ci.yaml
Illviljan Sep 23, 2022
66561ba
Update ci.yaml
Illviljan Sep 23, 2022
78bced3
Make arg input the same as shown in pytest
Illviljan Sep 24, 2022
1d93178
Add dtype check
Illviljan Sep 24, 2022
d13e475
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 24, 2022
11df4b4
have expected and actual results on the same side
Illviljan Sep 24, 2022
be224b8
use np.intp for count expected
Illviljan Sep 24, 2022
11af758
Merge remote-tracking branch 'upstream/main' into add_windows_ci
Illviljan Oct 9, 2022
c7d809f
Merge remote-tracking branch 'upstream/main' into add_windows_ci
Illviljan Oct 11, 2022
f993b31
[revert] minimize test
dcherian Oct 11, 2022
7559cce
specify dtypes
dcherian Oct 11, 2022
411db8a
more fixers
dcherian Oct 11, 2022
a9c7cae
more.
dcherian Oct 11, 2022
bed84c2
Fix groupby_reduce
dcherian Oct 12, 2022
232cf15
[revert] only windows tests
dcherian Oct 12, 2022
7ecb267
more fixes?
dcherian Oct 12, 2022
0df2daf
more fixes.
dcherian Oct 12, 2022
3a86969
more fix
dcherian Oct 12, 2022
f63e3d3
Last fix?
dcherian Oct 12, 2022
09c1297
Update .github/workflows/ci.yaml
dcherian Oct 12, 2022
b0e156d
Merge remote-tracking branch 'upstream/main' into add_windows_ci
Illviljan Oct 14, 2022
5db76c8
Merge remote-tracking branch 'upstream/main' into add_windows_ci
Illviljan Nov 2, 2022
3b79f6e
revert
dcherian Nov 4, 2022
5cd7bcb
Better fix
dcherian Nov 5, 2022
38438a2
Revert "revert"
dcherian Nov 5, 2022
d60a965
better comment.
dcherian Nov 5, 2022
0c733c4
clean up test
dcherian Nov 5, 2022
d5eb5d9
Revert "Revert "revert""
dcherian Nov 5, 2022
f287ed6
xfail labels dtype test
dcherian Nov 5, 2022
e842829
Revert "[revert] only windows tests"
dcherian Nov 5, 2022
85475ca
Revert "[revert] minimize test"
dcherian Nov 5, 2022
8867eb9
fix bad revert
dcherian Nov 5, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: ["ubuntu-latest"]
os: ["ubuntu-latest", "windows-latest"]
python-version: ["3.8", "3.10"]
steps:
- uses: actions/checkout@v3
Expand All @@ -43,11 +43,14 @@ jobs:
python="${{ matrix.python-version }}"
- name: Install flox
run: |
python -m pip install -e .
conda list
python -m pip install --no-deps -e .
- name: Run Tests
run: |
pytest -n auto --cov=./ --cov-report=xml
- name: Version info
run: |
conda info -a
conda list
dcherian marked this conversation as resolved.
Show resolved Hide resolved
- name: Upload code coverage to Codecov
uses: codecov/codecov-action@v3.1.1
with:
Expand Down
2 changes: 1 addition & 1 deletion tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def assert_equal(a, b, tolerance=None):
np.testing.assert_allclose(a, b, equal_nan=True, **tolerance)


@pytest.fixture(scope="module", params=["flox", "numpy", "numba"])
@pytest.fixture(scope="module", params=["numpy"])
def engine(request):
if request.param == "numba":
try:
Expand Down
71 changes: 35 additions & 36 deletions tests/test_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,25 +45,25 @@ def dask_array_ones(*args):
"sum",
"nansum",
"prod",
"nanprod",
"mean",
"nanmean",
"var",
"nanvar",
"std",
"nanstd",
"max",
"nanmax",
"min",
"nanmin",
"argmax",
pytest.param("nanargmax", marks=(pytest.mark.skip,)),
"argmin",
pytest.param("nanargmin", marks=(pytest.mark.skip,)),
"any",
"all",
pytest.param("median", marks=(pytest.mark.skip,)),
pytest.param("nanmedian", marks=(pytest.mark.skip,)),
# "nanprod",
# "mean",
# "nanmean",
# "var",
# "nanvar",
# "std",
# "nanstd",
# "max",
# "nanmax",
# "min",
# "nanmin",
# "argmax",
# pytest.param("nanargmax", marks=(pytest.mark.skip,)),
# "argmin",
# pytest.param("nanargmin", marks=(pytest.mark.skip,)),
# "any",
# "all",
# pytest.param("median", marks=(pytest.mark.skip,)),
# pytest.param("nanmedian", marks=(pytest.mark.skip,)),
)

if TYPE_CHECKING:
Expand Down Expand Up @@ -360,13 +360,12 @@ def test_groupby_agg_dask(func, shape, array_chunks, group_chunks, add_nan, dtyp
kwargs["expected_groups"] = [0, 2, 1]
with raise_if_dask_computes():
actual, groups = groupby_reduce(array, by, engine=engine, **kwargs, sort=False)
assert_equal(groups, [0, 2, 1])
assert_equal(groups, np.array([0, 2, 1], dtype=np.intp))
assert_equal(expected, actual[..., [0, 2, 1]])

kwargs["expected_groups"] = [0, 2, 1]
with raise_if_dask_computes():
actual, groups = groupby_reduce(array, by, engine=engine, **kwargs, sort=True)
assert_equal(groups, [0, 1, 2])
assert_equal(groups, np.array([0, 1, 2], np.intp))
assert_equal(expected, actual)


Expand Down Expand Up @@ -406,7 +405,7 @@ def test_numpy_reduce_axis_subset(engine):
def test_dask_reduce_axis_subset():

by = labels2d
array = np.ones_like(by)
array = np.ones_like(by, dtype=np.int64)
with raise_if_dask_computes():
result, _ = groupby_reduce(
da.from_array(array, chunks=(2, 3)),
Expand All @@ -415,11 +414,11 @@ def test_dask_reduce_axis_subset():
axis=1,
expected_groups=[0, 2],
)
assert_equal(result, [[2, 3], [2, 3]])
assert_equal(result, np.array([[2, 3], [2, 3]], dtype=np.int64))

by = np.broadcast_to(labels2d, (3, *labels2d.shape))
array = np.ones_like(by)
subarr = np.array([[1, 1], [1, 1], [123, 2], [1, 1], [1, 1]])
subarr = np.array([[1, 1], [1, 1], [123, 2], [1, 1], [1, 1]], dtype=np.int64)
expected = np.tile(subarr, (3, 1, 1))
with raise_if_dask_computes():
result, _ = groupby_reduce(
Expand All @@ -432,7 +431,7 @@ def test_dask_reduce_axis_subset():
)
assert_equal(result, expected)

subarr = np.array([[2, 3], [2, 3]])
subarr = np.array([[2, 3], [2, 3]], dtype=np.int64)
expected = np.tile(subarr, (3, 1, 1))
with raise_if_dask_computes():
result, _ = groupby_reduce(
Expand Down Expand Up @@ -634,7 +633,7 @@ def test_groupby_bins(chunk_labels, chunks, engine, method) -> None:
engine=engine,
method=method,
)
expected = np.array([3, 1, 0])
expected = np.array([3, 1, 0], dtype=np.int64)
for left, right in zip(groups, pd.IntervalIndex.from_arrays([1, 2, 4], [2, 4, 5]).to_numpy()):
assert left == right
assert_equal(actual, expected)
Expand Down Expand Up @@ -753,12 +752,12 @@ def test_dtype_preservation(dtype, func, engine):
@requires_dask
@pytest.mark.parametrize("method", ["split-reduce", "map-reduce", "cohorts"])
def test_cohorts(method):
repeats = [4, 4, 12, 2, 3, 4]
labels = np.repeat(np.arange(6), repeats)
repeats = np.array([4, 4, 12, 2, 3, 4], np.int32)
labels = np.repeat(np.arange(6), repeats).astype(np.int32)
array = dask.array.from_array(labels, chunks=(4, 8, 4, 9, 4))

actual, actual_groups = groupby_reduce(array, labels, func="count", method=method)
assert_equal(actual_groups, np.arange(6))
assert_equal(actual_groups, np.arange(6, dtype=np.int32))
assert_equal(actual, repeats)


Expand Down Expand Up @@ -796,9 +795,9 @@ def test_cohorts_nd_by(func, method, axis, engine):

actual, groups = groupby_reduce(array, by, sort=False, **kwargs)
if method == "map-reduce":
assert_equal(groups, [1, 30, 2, 31, 3, 4, 40])
assert_equal(groups, np.array([1, 30, 2, 31, 3, 4, 40], dtype=by.dtype))
else:
assert_equal(groups, [1, 30, 2, 31, 3, 40, 4])
assert_equal(groups, np.array([1, 30, 2, 31, 3, 40, 4], dtype=by.dtype))
reindexed = reindex_(actual, groups, pd.Index(sorted_groups))
assert_equal(reindexed, expected)

Expand Down Expand Up @@ -970,18 +969,18 @@ def test_factorize_reindex_sorting_ints():
)

expected = factorize_(**kwargs, reindex=True, sort=True)[0]
assert_equal(expected, [6, 1, 6, 2, 3, 5])
assert_equal(expected, np.array([6, 1, 6, 2, 3, 5], dtype=np.int64))

expected = factorize_(**kwargs, reindex=True, sort=False)[0]
assert_equal(expected, [6, 1, 6, 2, 3, 5])
assert_equal(expected, np.array([6, 1, 6, 2, 3, 5], dtype=np.int64))

kwargs["expected_groups"] = (np.arange(5, -1, -1),)

expected = factorize_(**kwargs, reindex=True, sort=True)[0]
assert_equal(expected, [6, 1, 6, 2, 3, 5])
assert_equal(expected, np.array([6, 1, 6, 2, 3, 5], dtype=np.int64))

expected = factorize_(**kwargs, reindex=True, sort=False)[0]
assert_equal(expected, [6, 4, 6, 3, 2, 0])
assert_equal(expected, np.array([6, 4, 6, 3, 2, 0], dtype=np.int64))


@requires_dask
Expand Down