
astropy - xarray + dask array #12600

Closed

@miguelcarcamov

Description

Hello everyone,

I am trying to use dask, xarray and astropy together, mainly because dask-ms retrieves radio datasets in this form. When I force my arrays to have a unit and then do an in-place operation on them, I get an error at compute time.

Example:

import xarray as xr
import dask.array as da
import astropy.units as u

c = xr.DataArray(da.array([1,2,3,4,5,6,7,8,9,10])) * u.m
c[0] *= -1
c.data.compute()
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/quantity.py in _to_own_unit(self, value, check_precision)
   1338         try:
-> 1339             _value = value.to_value(self.unit)
   1340         except AttributeError:

AttributeError: 'numpy.ndarray' object has no attribute 'to_value'

During handling of the above exception, another exception occurred:

UnitConversionError                       Traceback (most recent call last)
~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/quantity.py in to_value(self, unit, equivalencies)
    737             try:
--> 738                 scale = self.unit._to(unit)
    739             except Exception:

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/core.py in _to(self, other)
    949 
--> 950         raise UnitConversionError(
    951             f"'{self!r}' is not a scaled version of '{other!r}'")

UnitConversionError: 'Unit(dimensionless)' is not a scaled version of 'Unit("m")'

During handling of the above exception, another exception occurred:

UnitConversionError                       Traceback (most recent call last)
~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/array/slicing.py in setitem(x, v, indices)
   2175     try:
-> 2176         x[tuple(indices)] = v
   2177     except ValueError as e:

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/quantity.py in __setitem__(self, i, value)
   1060             self.info.adjust_indices(i, value, len(self))
-> 1061         self.view(np.ndarray).__setitem__(i, self._to_own_unit(value))
   1062 

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/quantity.py in _to_own_unit(self, value, check_precision)
   1345                 as_quantity = Quantity(value)
-> 1346                 _value = as_quantity.to_value(self.unit)
   1347             except TypeError:

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/quantity.py in to_value(self, unit, equivalencies)
    740                 # Short-cut failed; try default (maybe equivalencies help).
--> 741                 value = self._to_value(unit, equivalencies)
    742             else:

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/quantity.py in _to_value(self, unit, equivalencies)
    659             equivalencies = self._equivalencies
--> 660         return self.unit.to(unit, self.view(np.ndarray),
    661                             equivalencies=equivalencies)

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/core.py in to(self, other, value, equivalencies)
    986         else:
--> 987             return self._get_converter(other, equivalencies=equivalencies)(value)
    988 

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/core.py in _get_converter(self, other, equivalencies)
    917 
--> 918             raise exc
    919 

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/core.py in _get_converter(self, other, equivalencies)
    902         try:
--> 903             return self._apply_equivalencies(
    904                 self, other, self._normalize_equivalencies(equivalencies))

~/Documents/pyralysis/venv/lib/python3.9/site-packages/astropy/units/core.py in _apply_equivalencies(self, unit, other, equivalencies)
    885 
--> 886         raise UnitConversionError(
    887             "{} and {} are not convertible".format(

UnitConversionError: '' (dimensionless) and 'm' (length) are not convertible

The above exception was the direct cause of the following exception:

ValueError                                Traceback (most recent call last)
/tmp/ipykernel_577474/1384406403.py in <module>
----> 1 c.data.compute()

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/base.py in compute(self, **kwargs)
    283         dask.base.compute
    284         """
--> 285         (result,) = compute(self, traverse=False, **kwargs)
    286         return result
    287 

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/base.py in compute(*args, **kwargs)
    565         postcomputes.append(x.__dask_postcompute__())
    566 
--> 567     results = schedule(dsk, keys, **kwargs)
    568     return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
    569 

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs)
     77             pool = MultiprocessingPoolExecutor(pool)
     78 
---> 79     results = get_async(
     80         pool.submit,
     81         pool._max_workers,

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/local.py in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, chunksize, **kwargs)
    512                             _execute_task(task, data)  # Re-execute locally
    513                         else:
--> 514                             raise_exception(exc, tb)
    515                     res, worker_id = loads(res_info)
    516                     state["cache"][key] = res

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/local.py in reraise(exc, tb)
    323     if exc.__traceback__ is not tb:
    324         raise exc.with_traceback(tb)
--> 325     raise exc
    326 
    327 

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
    221     try:
    222         task, data = loads(task_info)
--> 223         result = _execute_task(task, data)
    224         id = get_id()
    225         result = dumps((result, id))

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
    119         # temporaries by their reference count and can execute certain
    120         # operations in-place.
--> 121         return func(*(_execute_task(a, cache) for a in args))
    122     elif not ishashable(arg):
    123         return arg

~/Documents/pyralysis/venv/lib/python3.9/site-packages/dask/array/slicing.py in setitem(x, v, indices)
   2176         x[tuple(indices)] = v
   2177     except ValueError as e:
-> 2178         raise ValueError(
   2179             "shape mismatch: value array could " "not be broadcast to indexing result"
   2180         ) from e

ValueError: shape mismatch: value array could not be broadcast to indexing result
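
If I read the traceback correctly, dask's per-chunk setitem ends up assigning a plain numpy.ndarray into an astropy Quantity chunk, and Quantity.__setitem__ then refuses to convert the dimensionless value to metres. A standalone sketch (my own isolation attempt, without dask or xarray) that I believe hits the same code path:

import numpy as np
import astropy.units as u

# Sketch of what I think the failing frame reduces to: Quantity.__setitem__
# receives a unitless ndarray and cannot convert it from dimensionless to 'm'
# (unless the value is all zeros/inf/nan).
q = u.Quantity(np.arange(10.0), u.m)
q[0] = np.array(-1.0)  # raises UnitConversionError, as in the traceback above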

import platform; print(platform.platform())
Linux-5.14.21-2-MANJARO-x86_64-with-glibc2.33
import sys; print("Python", sys.version)
Python 3.9.9 (main, Nov 20 2021, 21:30:06)
[GCC 11.1.0]
import numpy; print("Numpy", numpy.__version__)
Numpy 1.20.3
import erfa; print("pyerfa", erfa.__version__)
pyerfa 2.0.0
import astropy; print("astropy", astropy.__version__)
astropy 4.2.1
import scipy; print("Scipy", scipy.__version__)
Scipy 1.6.3
import matplotlib; print("Matplotlib", matplotlib.__version__)
Matplotlib 3.4.2

I'm not sure whether this is an error on the xarray side or the astropy side, but I guess we need to figure that out.
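
In the meantime, a workaround sketch that I have not tested on other versions: do the in-place edit on the plain dask-backed DataArray first and attach the unit afterwards, so the assignment never goes through Quantity.__setitem__.

import xarray as xr
import dask.array as da
import astropy.units as u

# Workaround sketch (assumption, not verified everywhere): modify the unitless
# dask-backed DataArray first, then attach the unit at the end.
c = xr.DataArray(da.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]))
c[0] *= -1
c = c * u.m
print(c.data.compute())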


    Labels

    Feature Request, external (PRs and issues related to external packages vendored with Astropy, astropy.extern), units
