
Commit 5431813 (1 parent: aa14656)

Type more utility functions


flox/core.py

Lines changed: 6 additions & 6 deletions
@@ -231,9 +231,9 @@ def find_group_cohorts(labels, chunks, merge: bool = True):
 
 
 def rechunk_for_cohorts(
-    array,
+    array: DaskArray,
     axis: T_Axis,
-    labels,
+    labels: np.ndarray,
     force_new_chunk_at,
     chunksize=None,
     ignore_old_chunks=False,
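
The new annotations lean on type aliases defined elsewhere in flox/core.py. A minimal sketch of plausible definitions, assuming DaskArray is a TYPE_CHECKING-only alias for dask.array.Array and T_Axis/T_Axes are simple index aliases (the names come from this diff; the definitions here are assumptions, not flox's actual code):

from __future__ import annotations  # lazy annotations, so TYPE_CHECKING-only names stay valid in signatures

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import dask.array

    # Hypothetical: resolved only by the type checker, so dask remains an
    # optional runtime dependency.
    DaskArray = dask.array.Array

T_Axis = int              # a single axis index
T_Axes = tuple[int, ...]  # several axis indices, as in the _conc2 hunk further down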
@@ -326,7 +326,7 @@ def rechunk_for_cohorts(
     return array.rechunk({axis: newchunks})
 
 
-def rechunk_for_blockwise(array, axis: T_Axis, labels):
+def rechunk_for_blockwise(array: DaskArray, axis: T_Axis, labels: np.ndarray):
     """
     Rechunks array so that group boundaries line up with chunk boundaries, allowing
     embarassingly parallel group reductions.
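
For context, a usage sketch of the newly typed rechunk_for_blockwise (assumes flox, dask, and numpy are installed; that the function returns the rechunked array is inferred from rechunk_for_cohorts above, not confirmed by this diff):

import dask.array as da
import numpy as np

from flox.core import rechunk_for_blockwise

# Chunk edges fall at 5 and 10, but group boundaries fall at 4 and 8.
arr = da.ones((12,), chunks=5)
labels = np.repeat([0, 1, 2], 4)

# Rechunk so every group lives entirely within one chunk, enabling an
# embarrassingly parallel blockwise group reduction.
rechunked = rechunk_for_blockwise(arr, axis=0, labels=labels)
print(rechunked.chunks)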
@@ -863,9 +863,9 @@ def _conc2(x_chunk, key1, key2=slice(None), axis: T_Axes = None) -> np.ndarray:
     # return concatenate3(mapped)
 
 
-def reindex_intermediates(x, agg, unique_groups):
+def reindex_intermediates(x: IntermediateDict, agg: Aggregation, unique_groups) -> IntermediateDict:
     new_shape = x["groups"].shape[:-1] + (len(unique_groups),)
-    newx = {"groups": np.broadcast_to(unique_groups, new_shape)}
+    newx: IntermediateDict = {"groups": np.broadcast_to(unique_groups, new_shape)}
     newx["intermediates"] = tuple(
         reindex_(
             v, from_=np.atleast_1d(x["groups"].squeeze()), to=pd.Index(unique_groups), fill_value=f
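
The IntermediateDict annotation, together with the key accesses in the body above, suggests a mapping with "groups" and "intermediates" entries. One plausible shape expressed as a TypedDict (an illustration inferred from usage here; flox's actual alias may well be a looser dict type):

from typing import Tuple, TypedDict

import numpy as np

class IntermediateDict(TypedDict):
    # Group labels present in this block, broadcast to the result shape.
    groups: np.ndarray
    # One partial-reduction array per intermediate of the Aggregation.
    intermediates: Tuple[np.ndarray, ...]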
@@ -875,7 +875,7 @@ def reindex_intermediates(x, agg, unique_groups):
     return newx
 
 
-def listify_groups(x):
+def listify_groups(x: IntermediateDict):
     return list(np.atleast_1d(x["groups"].squeeze()))
 
 