@@ -132,7 +132,6 @@ class InfoWorkflow:
     async def run(self) -> Dict:
         # Convert to JSON and back so it'll stringify un-JSON-able pieces
         ret = dataclasses.asdict(workflow.info())
-        ret["current_history_length"] = workflow.info().get_current_history_length()
         return json.loads(json.dumps(ret, default=str))


@@ -158,7 +157,6 @@ async def test_workflow_info(client: Client, env: WorkflowEnvironment):
     )
     assert info["attempt"] == 1
     assert info["cron_schedule"] is None
-    assert info["current_history_length"] == 3
     assert info["execution_timeout"] is None
     assert info["namespace"] == client.namespace
     assert info["retry_policy"] == json.loads(
@@ -173,6 +171,68 @@ async def test_workflow_info(client: Client, env: WorkflowEnvironment):
     assert info["workflow_type"] == "InfoWorkflow"


+@dataclass
+class HistoryInfo:
+    history_length: int
+    history_size: int
+    continue_as_new_suggested: bool
+
+
+@workflow.defn
+class HistoryInfoWorkflow:
+    @workflow.run
+    async def run(self) -> None:
+        # Just wait forever
+        await workflow.wait_condition(lambda: False)
+
+    @workflow.signal
+    async def bunch_of_events(self, count: int) -> None:
+        # Create a lot of one-day timers
+        for _ in range(count):
+            asyncio.create_task(asyncio.sleep(60 * 60 * 24))
+
+    @workflow.query
+    def get_history_info(self) -> HistoryInfo:
+        return HistoryInfo(
+            history_length=workflow.info().get_current_history_length(),
+            history_size=workflow.info().get_current_history_size(),
+            continue_as_new_suggested=workflow.info().is_continue_as_new_suggested(),
+        )
+
+
+async def test_workflow_history_info(
+    client: Client, env: WorkflowEnvironment, continue_as_new_suggest_history_count: int
+):
+    if env.supports_time_skipping:
+        pytest.skip("Java test server does not support should continue as new")
+    async with new_worker(client, HistoryInfoWorkflow) as worker:
+        handle = await client.start_workflow(
+            HistoryInfoWorkflow.run,
+            id=f"workflow-{uuid.uuid4()}",
+            task_queue=worker.task_queue,
+        )
+        # Issue query before anything else, which should mean only a history
+        # length of 3, at least 100 bytes of history, and no continue as new
+        # suggestion
+        orig_info = await handle.query(HistoryInfoWorkflow.get_history_info)
+        assert orig_info.history_length == 3
+        assert orig_info.history_size > 100
+        assert not orig_info.continue_as_new_suggested
+
+        # Now send a lot of events
+        await handle.signal(
+            HistoryInfoWorkflow.bunch_of_events, continue_as_new_suggest_history_count
+        )
+        # Send one more event to trigger the WFT update. We have to do this
+        # because just a query will have a stale representation of history
+        # counts, but a signal forces a new WFT.
+        await handle.signal(HistoryInfoWorkflow.bunch_of_events, 1)
+        new_info = await handle.query(HistoryInfoWorkflow.get_history_info)
+        assert new_info.history_length > continue_as_new_suggest_history_count
+        assert new_info.history_size > orig_info.history_size
+        assert new_info.continue_as_new_suggested
+
+
 @workflow.defn
 class SignalAndQueryWorkflow:
     def __init__(self) -> None: