@@ -359,6 +359,34 @@ def _assert_empty(args: tuple, msg: str = "%s") -> None:
         raise ValueError(msg % args)
 
 
+def _maybe_chunk(
+    name,
+    var,
+    chunks=None,
+    token=None,
+    lock=None,
+    name_prefix="xarray-",
+    overwrite_encoded_chunks=False,
+):
+    from dask.base import tokenize
+
+    if chunks is not None:
+        chunks = {dim: chunks[dim] for dim in var.dims if dim in chunks}
+    if var.ndim:
+        # when rechunking by different amounts, make sure dask names change
+        # by providing chunks as an input to tokenize.
+        # subtle bugs result otherwise. see GH3350
+        token2 = tokenize(name, token if token else var._data, chunks)
+        name2 = f"{name_prefix}{name}-{token2}"
+        var = var.chunk(chunks, name=name2, lock=lock)
+
+        if overwrite_encoded_chunks and var.chunks is not None:
+            var.encoding["chunks"] = tuple(x[0] for x in var.chunks)
+        return var
+    else:
+        return var
+
+
 def as_dataset(obj: Any) -> "Dataset":
     """Cast the given object to a Dataset.
 
@@ -1761,7 +1789,6 @@ def chunk(
         -------
         chunked : xarray.Dataset
         """
-        from dask.base import tokenize
 
         if isinstance(chunks, (Number, str)):
             chunks = dict.fromkeys(self.dims, chunks)
@@ -1774,26 +1801,10 @@ def chunk(
                 "object: %s" % bad_dims
             )
 
-        def selkeys(dict_, keys):
-            if dict_ is None:
-                return None
-            return {d: dict_[d] for d in keys if d in dict_}
-
-        def maybe_chunk(name, var, chunks):
-            chunks = selkeys(chunks, var.dims)
-            if not chunks:
-                chunks = None
-            if var.ndim > 0:
-                # when rechunking by different amounts, make sure dask names change
-                # by providing chunks as an input to tokenize.
-                # subtle bugs result otherwise. see GH3350
-                token2 = tokenize(name, token if token else var._data, chunks)
-                name2 = f"{name_prefix}{name}-{token2}"
-                return var.chunk(chunks, name=name2, lock=lock)
-            else:
-                return var
-
-        variables = {k: maybe_chunk(k, v, chunks) for k, v in self.variables.items()}
+        variables = {
+            k: _maybe_chunk(k, v, chunks, token, lock, name_prefix)
+            for k, v in self.variables.items()
+        }
 
         return self._replace(variables)
 
     def _validate_indexers(
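
For context, here is a minimal sketch of how the newly extracted `_maybe_chunk` helper could be exercised on a single variable. It assumes the helper lives in `xarray.core.dataset` (it is a private function, normally only called from `Dataset.chunk`) and that dask is installed; the variable, chunk mapping, and names below are made up for illustration.

import xarray as xr
from xarray.core.dataset import _maybe_chunk  # private helper; import path assumed

# A toy 2-D variable; dimension names and sizes are illustrative only.
var = xr.Variable(("x", "y"), [[1, 2, 3], [4, 5, 6]])

# The helper filters the mapping down to the variable's own dims, so the
# irrelevant "time" entry is silently ignored rather than raising an error.
chunks = {"x": 1, "y": 2, "time": 10}

chunked = _maybe_chunk("temperature", var, chunks)
print(chunked.chunks)  # ((1, 1), (2, 1)); the variable is now dask-backed

Note that `Dataset.chunk` passes only `chunks`, `token`, `lock`, and `name_prefix`; the new `overwrite_encoded_chunks` flag defaults to False and is not exercised by `chunk` itself.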