Interface and implementations of various task runners.
Task Runners in Prefect are responsible for managing the execution of Prefect task runs. Generally speaking, users are not expected to interact with task runners outside of configuring and initializing them for a flow.
Example
>>> from prefect import flow, task
>>> from prefect.task_runners import SequentialTaskRunner
>>> from typing import List
>>>
>>> @task
>>> def say_hello(name):
... print(f"hello {name}")
>>>
>>> @task
>>> def say_goodbye(name):
... print(f"goodbye {name}")
>>>
>>> @flow(task_runner=SequentialTaskRunner())
>>> def greetings(names: List[str]):
... for name in names:
... say_hello(name)
... say_goodbye(name)
>>>
>>> greetings(["arthur", "trillian", "ford", "marvin"])
hello arthur
goodbye arthur
hello trillian
goodbye trillian
hello ford
goodbye ford
hello marvin
goodbye marvin
Switching to a `DaskTaskRunner`:
>>> from prefect_dask.task_runners import DaskTaskRunner
>>> flow.task_runner = DaskTaskRunner()
>>> greetings(["arthur", "trillian", "ford", "marvin"])
hello arthur
goodbye arthur
hello trillian
hello ford
goodbye marvin
hello marvin
goodbye ford
goodbye trillian
For usage details, see the Task Runners documentation.
BaseTaskRunner
Source code in prefect/task_runners.py
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
class BaseTaskRunner(metaclass=abc.ABCMeta):
    """
    Abstract base class for task runners, which manage the execution of Prefect
    task runs on behalf of a flow.
    """

    def __init__(self) -> None:
        self.logger = get_logger(f"task_runner.{self.name}")
        # Tracks whether `start` has been entered; guards double-starts.
        self._started: bool = False

    @property
    @abc.abstractmethod
    def concurrency_type(self) -> TaskConcurrencyType:
        """The concurrency model this runner uses to execute submitted work."""
        pass  # noqa

    @property
    def name(self) -> str:
        """A short name derived from the class name, e.g. `sequential`."""
        return type(self).__name__.lower().replace("taskrunner", "")

    def duplicate(self):
        """
        Return a new task runner instance with the same options.
        """
        # The base class returns `NotImplemented` to indicate that this is not yet
        # implemented by a given task runner.
        return NotImplemented

    def __eq__(self, other: object) -> bool:
        """
        Returns true if the task runners use the same options.
        """
        if type(other) == type(self) and (
            # Compare public attributes for naive equality check
            # Subclasses should implement this method with a check init option equality
            {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
            == {k: v for k, v in other.__dict__.items() if not k.startswith("_")}
        ):
            return True
        else:
            # `NotImplemented` lets Python try the reflected comparison instead
            return NotImplemented

    @abc.abstractmethod
    async def submit(
        self,
        key: UUID,
        call: Callable[..., Awaitable[State[R]]],
    ) -> None:
        """
        Submit a call for execution and return a `PrefectFuture` that can be used to
        get the call result.

        Args:
            key: A unique key for this orchestration run of the task. Can be used
                for caching.
            call: The function to be executed.

        Returns:
            A future representing the result of `call` execution
        """
        raise NotImplementedError()

    @abc.abstractmethod
    async def wait(self, key: UUID, timeout: Optional[float] = None) -> Optional[State]:
        """
        Given a `PrefectFuture`, wait for its return state up to `timeout` seconds.
        If it is not finished after the timeout expires, `None` should be returned.

        Implementers should be careful to ensure that this function never returns or
        raises an exception.
        """
        raise NotImplementedError()

    @asynccontextmanager
    async def start(
        self: T,
    ) -> AsyncIterator[T]:
        """
        Start the task runner, preparing any resources necessary for task submission.

        Children should implement `_start` to prepare and clean up resources.

        Yields:
            The prepared task runner
        """
        if self._started:
            raise RuntimeError("The task runner is already started!")

        async with AsyncExitStack() as exit_stack:
            self.logger.debug("Starting task runner...")
            try:
                await self._start(exit_stack)
                self._started = True
                yield self
            finally:
                # Always mark the runner stopped, even if `_start` or the body raised
                self.logger.debug("Shutting down task runner...")
                self._started = False

    async def _start(self, exit_stack: AsyncExitStack) -> None:
        """
        Create any resources required for this task runner to submit work.

        Cleanup of resources should be submitted to the `exit_stack`.
        """
        pass  # noqa

    def __str__(self) -> str:
        return type(self).__name__
|
duplicate
Return a new task runner instance with the same options.
Source code in prefect/task_runners.py
112
113
114
115
116
117
def duplicate(self):
    """Create a fresh task runner configured with the same options."""
    # `NotImplemented` signals that the concrete task runner has not
    # provided its own implementation of this hook.
    return NotImplemented
|
start
async
Start the task runner, preparing any resources necessary for task submission.
Children should implement _start
to prepare and clean up resources.
Yields:

| Type | Description |
| --- | --- |
| `AsyncIterator[T]` | The prepared task runner |
Source code in prefect/task_runners.py
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
@asynccontextmanager
async def start(
    self: T,
) -> AsyncIterator[T]:
    """
    Start the task runner, acquiring whatever resources task submission needs.

    Subclasses hook in via `_start`, registering cleanup on the exit stack.

    Yields:
        The prepared task runner
    """
    # Refuse to start twice; a second `start` while running is a caller bug.
    if self._started:
        raise RuntimeError("The task runner is already started!")

    async with AsyncExitStack() as stack:
        self.logger.debug("Starting task runner...")
        try:
            await self._start(stack)
            self._started = True
            yield self
        finally:
            # Runs on both clean exit and error so the flag is always reset.
            self.logger.debug("Shutting down task runner...")
            self._started = False
|
submit
abstractmethod
async
Submit a call for execution and return a PrefectFuture
that can be used to
get the call result.
Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `key` | `UUID` | A unique key for this orchestration run of the task. Can be used for caching. | required |
| `call` | `Callable[..., Awaitable[State[R]]]` | The function to be executed | required |

Returns:

| Type | Description |
| --- | --- |
| `None` | A future representing the result of `call` execution |
Source code in prefect/task_runners.py
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
@abc.abstractmethod
async def submit(
    self,
    key: UUID,
    call: Callable[..., Awaitable[State[R]]],
) -> None:
    """
    Submit a call for execution and return a `PrefectFuture` that can be used to
    get the call result.

    Args:
        key: A unique key for this orchestration run of the task. Can be used
            for caching.
        call: The function to be executed.

    Returns:
        A future representing the result of `call` execution
    """
    raise NotImplementedError()
|
wait
abstractmethod
async
Given a PrefectFuture
, wait for its return state up to timeout
seconds.
If it is not finished after the timeout expires, None
should be returned.
Implementers should be careful to ensure that this function never returns or
raises an exception.
Source code in prefect/task_runners.py
156
157
158
159
160
161
162
163
164
@abc.abstractmethod
async def wait(self, key: UUID, timeout: Optional[float] = None) -> Optional[State]:
    """
    Given a `PrefectFuture`, wait for its return state up to `timeout` seconds.
    If it is not finished after the timeout expires, `None` should be returned.

    Implementers should be careful to ensure that this function never returns or
    raises an exception.
    """
    raise NotImplementedError()
|
ConcurrentTaskRunner
Bases: BaseTaskRunner
A concurrent task runner that allows tasks to switch when blocking on IO.
Synchronous tasks will be submitted to a thread pool maintained by anyio
.
Example
Using a thread for concurrency:
>>> from prefect import flow
>>> from prefect.task_runners import ConcurrentTaskRunner
>>> @flow(task_runner=ConcurrentTaskRunner)
>>> def my_flow():
>>> ...
Source code in prefect/task_runners.py
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
class ConcurrentTaskRunner(BaseTaskRunner):
    """
    A concurrent task runner that allows tasks to switch when blocking on IO.
    Synchronous tasks will be submitted to a thread pool maintained by `anyio`.

    Example:
        ```
        Using a thread for concurrency:
        >>> from prefect import flow
        >>> from prefect.task_runners import ConcurrentTaskRunner
        >>> @flow(task_runner=ConcurrentTaskRunner)
        >>> def my_flow():
        >>>     ...
        ```
    """

    def __init__(self):
        # TODO: Consider adding `max_workers` support using anyio capacity limiters

        # Runtime attributes; the task group only exists between `start` and
        # shutdown and is dropped on serialization (see `__getstate__`).
        self._task_group: Optional[anyio.abc.TaskGroup] = None
        self._result_events: Dict[UUID, Event] = {}
        self._results: Dict[UUID, Any] = {}
        self._keys: Set[UUID] = set()

        super().__init__()

    @property
    def concurrency_type(self) -> TaskConcurrencyType:
        return TaskConcurrencyType.CONCURRENT

    def duplicate(self):
        """Return a new, unstarted runner with the same (default) options."""
        return type(self)()

    async def submit(
        self,
        key: UUID,
        call: Callable[[], Awaitable[State[R]]],
    ) -> None:
        """
        Schedule `call` on the runner's task group; a per-key event is set once
        its result has been stored.
        """
        if not self._started:
            raise RuntimeError(
                "The task runner must be started before submitting work."
            )

        if not self._task_group:
            raise RuntimeError(
                "The concurrent task runner cannot be used to submit work after "
                "serialization."
            )

        # Create an event to set on completion
        self._result_events[key] = Event()

        # Rely on the event loop for concurrency
        self._task_group.start_soon(self._run_and_store_result, key, call)

    async def wait(
        self,
        key: UUID,
        timeout: Optional[float] = None,
    ) -> Optional[State]:
        """
        Wait up to `timeout` seconds for the result stored under `key`;
        returns `None` if the timeout expires first.
        """
        if not self._task_group:
            raise RuntimeError(
                "The concurrent task runner cannot be used to wait for work after "
                "serialization."
            )

        return await self._get_run_result(key, timeout)

    async def _run_and_store_result(
        self, key: UUID, call: Callable[[], Awaitable[State[R]]]
    ):
        """
        Simple utility to store the orchestration result in memory on completion

        Since this run is occurring on the main thread, we capture exceptions to
        prevent task crashes from crashing the flow run.
        """
        try:
            result = await call()
        except BaseException as exc:
            result = await exception_to_crashed_state(exc)

        self._results[key] = result
        self._result_events[key].set()

    async def _get_run_result(
        self, key: UUID, timeout: Optional[float] = None
    ) -> Optional[State]:
        """
        Block until the run result has been populated.
        """
        result = None  # retval on timeout

        # Note we do not use `asyncio.wrap_future` and instead use an `Event` to
        # avoid stdlib behavior where the wrapped future is cancelled if the
        # parent future is cancelled (as it would be during a timeout here)
        with anyio.move_on_after(timeout):
            await self._result_events[key].wait()
            result = self._results[key]

        return result  # timeout reached

    async def _start(self, exit_stack: AsyncExitStack):
        """
        Create the anyio task group used to schedule submitted work; it is
        closed via the `exit_stack` when the runner shuts down.
        """
        self._task_group = await exit_stack.enter_async_context(
            anyio.create_task_group()
        )

    def __getstate__(self):
        """
        Allow the `ConcurrentTaskRunner` to be serialized by dropping the task group.
        """
        data = self.__dict__.copy()
        data.update({k: None for k in {"_task_group"}})
        return data

    def __setstate__(self, data: dict):
        """
        When deserialized, we will no longer have a reference to the task group.
        """
        self.__dict__.update(data)
        self._task_group = None
|
SequentialTaskRunner
Bases: BaseTaskRunner
A simple task runner that executes calls as they are submitted.
If writing synchronous tasks, this runner will always execute tasks sequentially.
If writing async tasks, this runner will execute tasks sequentially unless grouped
using anyio.create_task_group
or asyncio.gather
.
Source code in prefect/task_runners.py
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
class SequentialTaskRunner(BaseTaskRunner):
    """
    A simple task runner that executes calls as they are submitted.

    If writing synchronous tasks, this runner will always execute tasks sequentially.
    If writing async tasks, this runner will execute tasks sequentially unless grouped
    using `anyio.create_task_group` or `asyncio.gather`.
    """

    def __init__(self) -> None:
        super().__init__()
        # Completed states keyed by the `key` passed to `submit`, which is a UUID.
        self._results: Dict[UUID, State] = {}

    @property
    def concurrency_type(self) -> TaskConcurrencyType:
        return TaskConcurrencyType.SEQUENTIAL

    def duplicate(self):
        """Return a new sequential runner with the same (default) options."""
        return type(self)()

    async def submit(
        self,
        key: UUID,
        call: Callable[..., Awaitable[State[R]]],
    ) -> None:
        """
        Run `call` immediately and store its resulting state under `key`.

        Exceptions from `call` are captured and converted to crashed states so
        a task failure does not crash the flow run.
        """
        # Run the function immediately and store the result in memory
        try:
            result = await call()
        except BaseException as exc:
            result = await exception_to_crashed_state(exc)

        self._results[key] = result

    async def wait(self, key: UUID, timeout: Optional[float] = None) -> Optional[State]:
        """
        Return the stored state for `key`; the task already completed when
        `submit` returned, so `timeout` is not consulted.
        """
        return self._results[key]
|