
Commit 9d96665

Issue #720/#402/#725 add documentation
1 parent 19db418 commit 9d96665

4 files changed: +131, -23 lines


docs/api.rst

Lines changed: 12 additions & 2 deletions
@@ -47,8 +47,18 @@ openeo.rest.mlmodel
     :inherited-members:
 
 
-openeo.rest.multiresult
------------------------
+
+
+Results
+--------
+
+.. automodule:: openeo.rest.result
+    :members:
+    :inherited-members:
+
+.. automodule:: openeo.rest.stac_resource
+    :members:
+
 
 .. automodule:: openeo.rest.multiresult
     :members: MultiResult

openeo/rest/datacube.py

Lines changed: 31 additions & 13 deletions
@@ -2369,7 +2369,7 @@ def download(
         If outputfile is provided, the result is stored on disk locally, otherwise, a bytes object is returned.
         The bytes object can be passed on to a suitable decoder for decoding.
 
-        :param outputfile: Optional, an output file if the result needs to be stored on disk.
+        :param outputfile: Optional, output path to download to.
         :param format: Optional, an output format supported by the backend.
         :param options: Optional, file format options
         :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
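For context, this is how the download method documented above is typically used; a minimal sketch assuming a connected backend (the URL, collection id and filenames below are illustrative placeholders, not part of this commit):

    import openeo

    # Connect to an openEO backend (placeholder URL) and build a small cube.
    connection = openeo.connect("https://openeo.example").authenticate_oidc()
    cube = connection.load_collection(
        "SENTINEL2_L2A",  # illustrative collection id
        spatial_extent={"west": 5.0, "south": 51.0, "east": 5.1, "north": 51.1},
        temporal_extent=["2024-06-01", "2024-06-30"],
        bands=["B04", "B08"],
    )

    # Synchronous execution: write the result to the given output path ...
    cube.download("cube.nc", format="netCDF")
    # ... or, without an outputfile, get the raw bytes back to decode yourself.
    raw_bytes = cube.download(format="netCDF")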
@@ -2515,13 +2515,29 @@ def execute_batch(
         **format_options,
     ) -> BatchJob:
         """
-        Evaluate the process graph by creating a batch job, and retrieving the results when it is finished.
-        This method is mostly recommended if the batch job is expected to run in a reasonable amount of time.
+        Execute the underlying process graph at the backend in batch job mode:
 
-        For very long-running jobs, you probably do not want to keep the client running.
+        - create the job (like :py:meth:`create_job`)
+        - start the job (like :py:meth:`BatchJob.start() <openeo.rest.job.BatchJob.start>`)
+        - track the job's progress with an active polling loop
+          (like :py:meth:`BatchJob.run_synchronous() <openeo.rest.job.BatchJob.run_synchronous>`)
+        - optionally (if ``outputfile`` is specified) download the job's results
+          when the job finished successfully
 
-        :param outputfile: The path of a file to which a result can be written
+        .. note::
+            Because of the active polling loop,
+            which blocks any further progress of your script or application,
+            this :py:meth:`execute_batch` method is mainly recommended
+            for batch jobs that are expected to complete
+            in a time that is reasonable for your use case.
+
+        :param outputfile: Optional, output path to download to.
         :param out_format: (optional) File format to use for the job result.
+        :param title: job title.
+        :param description: job description.
+        :param plan: The billing plan to process and charge the job with
+        :param budget: Maximum budget to be spent on executing the job.
+            Note that some backends do not honor this limit.
         :param additional: additional (top-level) properties to set in the request body
         :param job_options: dictionary of job options to pass to the backend
             (under top-level property "job_options")
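As a rough illustration of the batch-job flow documented above, reusing the cube from the previous sketch (the title, filename and job option shown are illustrative, backend-specific assumptions):

    # Create, start and poll the job; because outputfile is set,
    # the results are downloaded once the job finishes successfully.
    # The call blocks until the job reaches a final state.
    job = cube.execute_batch(
        outputfile="result.nc",
        out_format="netCDF",
        title="demo batch job",
        job_options={"driver-memory": "2G"},  # hypothetical, backend-specific option
    )
    print(job.job_id, job.status())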
@@ -2601,17 +2617,19 @@ def create_job(
         **format_options,
     ) -> BatchJob:
         """
-        Sends the datacube's process graph as a batch job to the back-end
-        and return a :py:class:`~openeo.rest.job.BatchJob` instance.
+        Send the underlying process graph to the backend
+        to create an openEO batch job
+        and return a corresponding :py:class:`~openeo.rest.job.BatchJob` instance.
 
-        Note that the batch job will just be created at the back-end,
-        it still needs to be started and tracked explicitly.
-        Use :py:meth:`execute_batch` instead to have the openEO Python client take care of that job management.
+        Note that this method only *creates* the openEO batch job at the backend,
+        but it does not *start* it.
+        Use :py:meth:`execute_batch` instead to let the openEO Python client
+        take care of the full job life cycle: create, start and track its progress until completion.
 
         :param out_format: output file format.
-        :param title: job title
-        :param description: job description
-        :param plan: The billing plan to process and charge the job with
+        :param title: job title.
+        :param description: job description.
+        :param plan: The billing plan to process and charge the job with.
         :param budget: Maximum budget to be spent on executing the job.
             Note that some backends do not honor this limit.
         :param additional: additional (top-level) properties to set in the request body
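And the create-only variant, where the calling code manages the job life cycle itself (again assuming the cube from the earlier sketch):

    # Only create the job at the backend; nothing is started yet.
    job = cube.create_job(out_format="GTiff", title="demo job, manual start")

    # Start it, block until it finished (or failed), then fetch the results.
    job.start_and_wait()
    job.get_results().download_files("output/")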

openeo/rest/result.py

Lines changed: 7 additions & 2 deletions
@@ -2,6 +2,11 @@
 
 
 class SaveResult(StacResource):
-    """TODO"""
+    """
+    Alias for :py:class:`~openeo.rest.stac_resource.StacResource`,
+    returned by methods corresponding to the openEO process ``save_result``, like
+    :py:meth:`DataCube.save_result() <openeo.rest.datacube.DataCube.save_result>`
+    and :py:meth:`VectorCube.save_result() <openeo.rest.vectorcube.VectorCube.save_result>`
 
-    pass
+    .. versionadded:: 0.39.0
+    """

openeo/rest/stac_resource.py

Lines changed: 81 additions & 6 deletions
@@ -20,14 +20,17 @@
 class StacResource(_ProcessGraphAbstraction):
     """
     Handle for a progress graph node that represents a STAC resource (object with subtype "stac"),
-    e.g. as returned by `save_result`, or handled by `export_workspace`/`stac_modify`.
-
+    e.g. as returned by openEO process ``save_result``,
+    or handled by openEO processes ``export_workspace``/``stac_modify``.
 
     Refers to a STAC resource of any type (Catalog, Collection, or Item).
     It can refer to:
+
     - static STAC resources, e.g. hosted on cloud storage
     - dynamic STAC resources made available via a STAC API
     - a STAC JSON representation embedded as an argument into an openEO user-defined process
+
+    .. versionadded:: 0.39.0
     """
 
     def __init__(self, graph: PGNode, connection: Optional[Connection] = None):
@@ -59,7 +62,21 @@ def download(
         additional: Optional[dict] = None,
         job_options: Optional[dict] = None,
     ):
-        """TODO"""
+        """
+        Execute synchronously and download the result (cube).
+
+        If outputfile is provided, the result is stored on disk locally, otherwise, a bytes object is returned.
+        The bytes object can be passed on to a suitable decoder for decoding.
+
+        :param outputfile: Optional, output path to download to.
+        :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
+            (overruling the connection's ``auto_validate`` setting).
+        :param additional: additional (top-level) properties to set in the request body
+        :param job_options: dictionary of job options to pass to the backend
+            (under top-level property "job_options")
+
+        :return: None if the result is stored to disk, or a bytes object returned by the backend.
+        """
         return self._connection.download(
             graph=self.flat_graph(),
             outputfile=outputfile,
@@ -80,7 +97,31 @@ def create_job(
         validate: Optional[bool] = None,
         log_level: Optional[str] = None,
     ) -> BatchJob:
-        """TODO"""
+        """
+        Send the underlying process graph to the backend
+        to create an openEO batch job
+        and return a corresponding :py:class:`~openeo.rest.job.BatchJob` instance.
+
+        Note that this method only *creates* the openEO batch job at the backend,
+        but it does not *start* it.
+        Use :py:meth:`execute_batch` instead to let the openEO Python client
+        take care of the full job life cycle: create, start and track its progress until completion.
+
+        :param title: job title.
+        :param description: job description.
+        :param plan: The billing plan to process and charge the job with.
+        :param budget: Maximum budget to be spent on executing the job.
+            Note that some backends do not honor this limit.
+        :param additional: additional (top-level) properties to set in the request body
+        :param job_options: dictionary of job options to pass to the backend
+            (under top-level property "job_options")
+        :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
+            (overruling the connection's ``auto_validate`` setting).
+        :param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
+            One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
+
+        :return: Handle for the job created at the backend.
+        """
         return self._connection.create_job(
             process_graph=self.flat_graph(),
             title=title,
@@ -107,11 +148,45 @@ def execute_batch(
         additional: Optional[dict] = None,
         job_options: Optional[dict] = None,
         validate: Optional[bool] = None,
-        auto_add_save_result: bool = True,
         show_error_logs: bool = True,
         log_level: Optional[str] = None,
     ) -> BatchJob:
-        """TODO"""
+        """
+        Execute the underlying process graph at the backend in batch job mode:
+
+        - create the job (like :py:meth:`create_job`)
+        - start the job (like :py:meth:`BatchJob.start() <openeo.rest.job.BatchJob.start>`)
+        - track the job's progress with an active polling loop
+          (like :py:meth:`BatchJob.run_synchronous() <openeo.rest.job.BatchJob.run_synchronous>`)
+        - optionally (if ``outputfile`` is specified) download the job's results
+          when the job finished successfully
+
+        .. note::
+            Because of the active polling loop,
+            which blocks any further progress of your script or application,
+            this :py:meth:`execute_batch` method is mainly recommended
+            for batch jobs that are expected to complete
+            in a time that is reasonable for your use case.
+
+        :param outputfile: Optional, output path to download to.
+        :param title: job title.
+        :param description: job description.
+        :param plan: The billing plan to process and charge the job with
+        :param budget: Maximum budget to be spent on executing the job.
+            Note that some backends do not honor this limit.
+        :param additional: additional (top-level) properties to set in the request body
+        :param job_options: dictionary of job options to pass to the backend
+            (under top-level property "job_options")
+        :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
+            (overruling the connection's ``auto_validate`` setting).
+        :param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
+            One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
+        :param print: print/logging function to show progress/status
+        :param max_poll_interval: maximum number of seconds to sleep between job status polls
+        :param connection_retry_interval: how long to wait when status poll failed due to connection issue
+        :param show_error_logs: whether to automatically print error logs when the batch job failed.
+
+        """
         job = self.create_job(
             title=title,
             description=description,
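To round off, a sketch of how the new StacResource methods chain together after save_result (titles and filenames are illustrative placeholders):

    stac_resource = cube.save_result(format="netCDF")

    # Batch mode in one call, including result download:
    job = stac_resource.execute_batch(outputfile="batch-result.nc", title="StacResource demo")

    # Or create-only, with manual start and tracking:
    job = stac_resource.create_job(title="StacResource demo, manual start")
    job.start_and_wait()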
