 class StacResource(_ProcessGraphAbstraction):
     """
     Handle for a process graph node that represents a STAC resource (object with subtype "stac"),
-    e.g. as returned by `save_result`, or handled by `export_workspace`/`stac_modify`.
-
+    e.g. as returned by openEO process ``save_result``,
+    or handled by openEO processes ``export_workspace``/``stac_modify``.
 
     Refers to a STAC resource of any type (Catalog, Collection, or Item).
     It can refer to:
+
     - static STAC resources, e.g. hosted on cloud storage
     - dynamic STAC resources made available via a STAC API
     - a STAC JSON representation embedded as an argument into an openEO user-defined process
+
+    .. versionadded:: 0.39.0
     """
 
     def __init__(self, graph: PGNode, connection: Optional[Connection] = None):
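For orientation, a minimal sketch of how such a handle could show up in a user script. This is not part of the changeset: the connection URL, collection id and extents are made up, and it assumes (as the docstring above suggests for the ``save_result`` process) that the client-side result of ``save_result`` behaves as such a STAC resource handle.

    import openeo

    connection = openeo.connect("openeo.example.org").authenticate_oidc()
    cube = connection.load_collection(
        "SENTINEL2_L2A",
        spatial_extent={"west": 5.0, "south": 51.0, "east": 5.1, "north": 51.1},
        temporal_extent=["2024-05-01", "2024-05-31"],
    )
    # save_result adds a node with subtype "stac" to the process graph;
    # the returned object is the handle used in the sketches below.
    stac_resource = cube.save_result(format="GTiff")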
@@ -59,7 +62,21 @@ def download(
         additional: Optional[dict] = None,
         job_options: Optional[dict] = None,
     ):
-        """TODO"""
+        """
+        Execute synchronously and download the result (cube).
+
+        If ``outputfile`` is provided, the result is stored on disk locally; otherwise, a bytes object is returned.
+        The bytes object can be passed on to a suitable decoder for decoding.
+
+        :param outputfile: Optional, output path to download to.
+        :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
+            (overruling the connection's ``auto_validate`` setting).
+        :param additional: additional (top-level) properties to set in the request body
+        :param job_options: dictionary of job options to pass to the backend
+            (under top-level property "job_options")
+
+        :return: None if the result is stored to disk, or a bytes object returned by the backend.
+        """
         return self._connection.download(
             graph=self.flat_graph(),
             outputfile=outputfile,
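A hedged illustration of the two call patterns the new ``download`` docstring describes, continuing from the sketch above (``stac_resource`` and the filenames are illustrative):

    # With an output path: the result is written to disk and the method returns None.
    stac_resource.download("result.tif")

    # Without an output path: the raw bytes come back and can be decoded as needed.
    raw = stac_resource.download()
    with open("result-copy.tif", "wb") as f:
        f.write(raw)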
@@ -80,7 +97,31 @@ def create_job(
         validate: Optional[bool] = None,
         log_level: Optional[str] = None,
     ) -> BatchJob:
-        """TODO"""
+        """
+        Send the underlying process graph to the backend
+        to create an openEO batch job
+        and return a corresponding :py:class:`~openeo.rest.job.BatchJob` instance.
+
+        Note that this method only *creates* the openEO batch job at the backend,
+        but it does not *start* it.
+        Use :py:meth:`execute_batch` instead to let the openEO Python client
+        take care of the full job life cycle: create, start and track its progress until completion.
+
+        :param title: job title.
+        :param description: job description.
+        :param plan: The billing plan to process and charge the job with.
+        :param budget: Maximum budget to be spent on executing the job.
+            Note that some backends do not honor this limit.
+        :param additional: additional (top-level) properties to set in the request body
+        :param job_options: dictionary of job options to pass to the backend
+            (under top-level property "job_options")
+        :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
+            (overruling the connection's ``auto_validate`` setting).
+        :param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
+            One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
+
+        :return: Handle for the job created at the backend.
+        """
         return self._connection.create_job(
             process_graph=self.flat_graph(),
             title=title,
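A small sketch of the create-then-start split that the ``create_job`` docstring emphasizes, again continuing from the earlier sketch (title and description are illustrative):

    # Only creates the job at the backend; nothing is running yet.
    job = stac_resource.create_job(title="STAC resource export", description="illustrative job")
    print(job.job_id)

    # Starting and tracking the job are separate steps:
    job.start()
    print(job.status())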
@@ -107,11 +148,45 @@ def execute_batch(
         additional: Optional[dict] = None,
         job_options: Optional[dict] = None,
         validate: Optional[bool] = None,
-        auto_add_save_result: bool = True,
         show_error_logs: bool = True,
         log_level: Optional[str] = None,
     ) -> BatchJob:
-        """TODO"""
+        """
+        Execute the underlying process graph at the backend in batch job mode:
+
+        - create the job (like :py:meth:`create_job`)
+        - start the job (like :py:meth:`BatchJob.start() <openeo.rest.job.BatchJob.start>`)
+        - track the job's progress with an active polling loop
+          (like :py:meth:`BatchJob.run_synchronous() <openeo.rest.job.BatchJob.run_synchronous>`)
+        - optionally (if ``outputfile`` is specified) download the job's results
+          when the job finishes successfully
+
+        .. note::
+            Because of the active polling loop,
+            which blocks any further progress of your script or application,
+            this :py:meth:`execute_batch` method is mainly recommended
+            for batch jobs that are expected to complete
+            in a time that is reasonable for your use case.
+
+        :param outputfile: Optional, output path to download to.
+        :param title: job title.
+        :param description: job description.
+        :param plan: The billing plan to process and charge the job with.
+        :param budget: Maximum budget to be spent on executing the job.
+            Note that some backends do not honor this limit.
+        :param additional: additional (top-level) properties to set in the request body
+        :param job_options: dictionary of job options to pass to the backend
+            (under top-level property "job_options")
+        :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
+            (overruling the connection's ``auto_validate`` setting).
+        :param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
+            One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
+        :param print: print/logging function to show progress/status
+        :param max_poll_interval: maximum number of seconds to sleep between job status polls
+        :param connection_retry_interval: how long to wait when status poll failed due to connection issue
+        :param show_error_logs: whether to automatically print error logs when the batch job failed.
+
+        """
         job = self.create_job(
             title=title,
             description=description,
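Finally, a hedged end-to-end sketch of ``execute_batch``, which wraps create, start, polling and (optionally) download in one blocking call; the output filename and job options shown here are made up:

    job = stac_resource.execute_batch(
        outputfile="stac-result.tif",
        title="STAC resource export",
        job_options={"driver-memory": "2G"},  # hypothetical, backend-specific option
    )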