-
-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Closed
Labels
Description
Upstream dev failing: https://travis-ci.org/github/dask/dask/jobs/680572761#L1224
sparse / numba
Some of these look like they'll be fixed by updating to the latest version of Numba.
[gw2] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
def test_from_array_meta():
sparse = pytest.importorskip("sparse")
x = np.ones(10)
meta = sparse.COO.from_numpy(x)
y = da.from_array(x, meta=meta)
> assert isinstance(y._meta, sparse.COO)
E AssertionError: assert False
E + where False = isinstance(array([], dtype=float64), <class 'sparse._coo.core.COO'>)
E + where array([], dtype=float64) = dask.array<array, shape=(10,), dtype=float64, chunksize=(10,), chunktype=numpy.ndarray>._meta
E + and <class 'sparse._coo.core.COO'> = <module 'sparse' from '/home/travis/miniconda/envs/test-environment/lib/python3.8/site-packages/sparse/__init__.py'>.COO
_____________________ test_array_function_sparse_tensordot _____________________
[gw0] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
@pytest.mark.skipif(missing_arrfunc_cond, reason=missing_arrfunc_reason)
def test_array_function_sparse_tensordot():
sparse = pytest.importorskip("sparse")
x = np.random.random((2, 3, 4))
x[x < 0.9] = 0
y = np.random.random((4, 3, 2))
y[y < 0.9] = 0
xx = sparse.COO(x)
yy = sparse.COO(y)
assert_eq(
> np.tensordot(x, y, axes=(2, 0)), np.tensordot(xx, yy, axes=(2, 0)).todense()
)
________________________ test_apply_gufunc_via_numba_01 ________________________
[gw0] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
def test_apply_gufunc_via_numba_01():
numba = pytest.importorskip("numba")
@numba.guvectorize(
[(numba.float64[:], numba.float64[:], numba.float64[:])], "(n),(n)->(n)"
)
> def g(x, y, res):
dask/array/tests/test_gufunc.py:565:
_________________ test_to_parquet_lazy[fastparquet-processes] __________________
[gw1] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
tmpdir = '/tmp/pytest-of-travis/pytest-1/test_to_parquet_lazy_fastparqu1'
scheduler = 'processes', engine = 'fastparquet'
@pytest.mark.parametrize("scheduler", ["threads", "processes"])
def test_to_parquet_lazy(tmpdir, scheduler, engine):
tmpdir = str(tmpdir)
df = pd.DataFrame({"a": [1, 2, 3, 4], "b": [1.0, 2.0, 3.0, 4.0]})
df.index.name = "index"
ddf = dd.from_pandas(df, npartitions=2)
value = ddf.to_parquet(tmpdir, compute=False, engine=engine)
assert hasattr(value, "dask")
> value.compute(scheduler=scheduler)
NumPy
______________________________ test_argwhere_str _______________________________
[gw1] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
def test_argwhere_str():
x = np.array(list("Hello world"))
d = da.from_array(x, chunks=(4,))
x_nz = np.argwhere(x)
d_nz = da.argwhere(d)
> assert_eq(d_nz, x_nz)
____________________________ test_count_nonzero_str ____________________________
[gw1] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
def test_count_nonzero_str():
x = np.array(list("Hello world"))
d = da.from_array(x, chunks=(4,))
x_c = np.count_nonzero(x)
d_c = da.count_nonzero(d)
> assert x_c == d_c.compute()
E assert 10 == 11
E + where 11 = <bound method DaskMethodsMixin.compute of dask.array<sum-aggregate, shape=(), dtype=int64, chunksize=(), chunktype=numpy.ndarray>>()
E + where <bound method DaskMethodsMixin.compute of dask.array<sum-aggregate, shape=(), dtype=int64, chunksize=(), chunktype=numpy.ndarray>> = dask.array<sum-aggregate, shape=(), dtype=int64, chunksize=(), chunktype=numpy.ndarray>.compute
dask/array/tests/test_routines.py:1322: AssertionError
________________________________ test_tensordot ________________________________
[gw1] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
@pytest.mark.skipif(
sparse.__version__ < "0.7.0+10",
reason="fixed in https://github.com/pydata/sparse/pull/256",
)
def test_tensordot():
x = da.random.random((2, 3, 4), chunks=(1, 2, 2))
x[x < 0.8] = 0
y = da.random.random((4, 3, 2), chunks=(2, 2, 1))
y[y < 0.8] = 0
xx = x.map_blocks(sparse.COO.from_numpy)
yy = y.map_blocks(sparse.COO.from_numpy)
> assert_eq(da.tensordot(x, y, axes=(2, 0)), da.tensordot(xx, yy, axes=(2, 0)))
Pandas
The deprecation warnings should be fixed in pandas master. Check the build number; this may
be a MacPython wheel-building issue.
_________________________ test_read_json_meta[records] _________________________
[gw0] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
orient = 'records'
tmpdir = local('/tmp/pytest-of-travis/pytest-0/test_read_json_meta_records_0')
@pytest.mark.parametrize("orient", ["split", "records", "index", "columns", "values"])
def test_read_json_meta(orient, tmpdir):
df = pd.DataFrame({"x": range(5), "y": ["a", "b", "c", "d", "e"]})
df2 = df.assign(x=df.x + 0.5)
lines = orient == "records"
> df.to_json(str(tmpdir.join("fil1.json")), orient=orient, lines=lines)
dask/dataframe/io/tests/test_json.py:49:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../miniconda/envs/test-environment/lib/python3.8/site-packages/pandas/core/generic.py:2331: in to_json
return json.to_json(
../../../miniconda/envs/test-environment/lib/python3.8/site-packages/pandas/io/json/_json.py:88: in to_json
s = convert_to_line_delimits(s)
../../../miniconda/envs/test-environment/lib/python3.8/site-packages/pandas/io/json/_normalize.py:28: in convert_to_line_delimits
return convert_json_to_lines(s)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> ???
E DeprecationWarning: tostring() is deprecated. Use tobytes() instead.
pandas/_libs/writers.pyx:115: DeprecationWarning
______________________ test_describe[all-None-None-None] _______________________
[gw2] linux -- Python 3.8.2 /home/travis/miniconda/envs/test-environment/bin/python
...ggV
Reactions are currently unavailable