 import shellescape
 from schema_salad.ref_resolver import file_uri, uri_file_path
 from schema_salad.sourceline import SourceLine, indent
-from typing import Any, Callable, cast, Generator, Text, Union
+from typing import Any, Callable, cast, Generator, Text, Union, Dict

 from .builder import CONTENT_LIMIT, substitute, Builder
 from .pathmapper import adjustFileObjs, adjustDirObjs, visit_class
 from .errors import WorkflowException
-from .job import CommandLineJob
+from .job import JobBase, CommandLineJob, DockerCommandLineJob
 from .pathmapper import PathMapper, get_listing, trim_listing
-from .process import Process, shortname, uniquename, normalizeFilesDirs, compute_checksums
+from .process import Process, shortname, uniquename, normalizeFilesDirs, compute_checksums, _logger_validation_warnings
 from .stdfsaccess import StdFsAccess
 from .utils import aslist

@@ -148,8 +148,9 @@ def run(self, **kwargs):

 # map files to assigned path inside a container. We need to also explicitly
 # walk over input as implicit reassignment doesn't reach everything in builder.bindings
-def check_adjust(builder, f):
-    # type: (Builder, Dict[Text, Any]) -> Dict[Text, Any]
+def check_adjust(builder, stepname, f):
+    # type: (Builder, Text, Dict[Text, Any]) -> Dict[Text, Any]
+
     f["path"] = builder.pathmapper.mapper(f["location"])[1]
     f["dirname"], f["basename"] = os.path.split(f["path"])
     if f["class"] == "File":
@@ -171,20 +172,23 @@ def __init__(self, toolpath_object, **kwargs):
         # type: (Dict[Text, Any], **Any) -> None
         super(CommandLineTool, self).__init__(toolpath_object, **kwargs)

-    def makeJobRunner(self):  # type: () -> CommandLineJob
-        return CommandLineJob()
+    def makeJobRunner(self):  # type: () -> JobBase
+        dockerReq, _ = self.get_requirement("DockerRequirement")
+        if dockerReq:
+            return DockerCommandLineJob()
+        else:
+            return CommandLineJob()

     def makePathMapper(self, reffiles, stagedir, **kwargs):
         # type: (List[Any], Text, **Any) -> PathMapper
-        dockerReq, _ = self.get_requirement("DockerRequirement")
         return PathMapper(reffiles, kwargs["basedir"], stagedir)

     def job(self,
             job_order,  # type: Dict[Text, Text]
             output_callbacks,  # type: Callable[[Any, Any], Any]
             **kwargs  # type: Any
             ):
-        # type: (...) -> Generator[Union[CommandLineJob, CallbackJob], None, None]
+        # type: (...) -> Generator[Union[JobBase, CallbackJob], None, None]

         jobname = uniquename(kwargs.get("name", shortname(self.tool.get("id", "job"))))

@@ -199,9 +203,9 @@ def job(self,
                                                 cachebuilder.stagedir,
                                                 separateDirs=False)
             _check_adjust = partial(check_adjust, cachebuilder)
-
             visit_class([cachebuilder.files, cachebuilder.bindings],
                         ("File", "Directory"), _check_adjust)
+
             cmdline = flatten(map(cachebuilder.generate_arg, cachebuilder.bindings))
             (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
             if docker_req and kwargs.get("use_container") is not False:
@@ -296,7 +300,7 @@ def rm_pending_output_callback(output_callbacks, jobcachepending,
         _logger.debug(u"[job %s] path mappings is %s", j.name,
                       json.dumps({p: builder.pathmapper.mapper(p) for p in builder.pathmapper.files()}, indent=4))

-        _check_adjust = partial(check_adjust, builder)
+        _check_adjust = partial(check_adjust, builder, jobname)

         visit_class([builder.files, builder.bindings], ("File", "Directory"), _check_adjust)

@@ -368,8 +372,38 @@ def rm_pending_output_callback(output_callbacks, jobcachepending,
                         ls[i] = t["entry"]
             j.generatefiles[u"listing"] = ls

+        inplaceUpdateReq = self.get_requirement("http://commonwl.org/cwltool#InplaceUpdateRequirement")[0]
+
+        if inplaceUpdateReq:
+            j.inplace_update = inplaceUpdateReq["inplaceUpdate"]
         normalizeFilesDirs(j.generatefiles)

+        readers = {}
+        muts = set()
+
+        def register_mut(f):
+            muts.add(f["location"])
+            builder.mutation_manager.register_mutation(j.name, f)
+
+        def register_reader(f):
+            if f["location"] not in muts:
+                builder.mutation_manager.register_reader(j.name, f)
+                readers[f["location"]] = f
+
+        for li in j.generatefiles["listing"]:
+            li = cast(Dict[Text, Any], li)
+            if li.get("writable") and j.inplace_update:
+                adjustFileObjs(li, register_mut)
+                adjustDirObjs(li, register_mut)
+            else:
+                adjustFileObjs(li, register_reader)
+                adjustDirObjs(li, register_reader)
+
+        adjustFileObjs(builder.files, register_reader)
+        adjustFileObjs(builder.bindings, register_reader)
+        adjustDirObjs(builder.files, register_reader)
+        adjustDirObjs(builder.bindings, register_reader)
+
         j.environment = {}
         evr = self.get_requirement("EnvVarRequirement")[0]
         if evr:
@@ -391,16 +425,17 @@ def rm_pending_output_callback(output_callbacks, jobcachepending,
         j.pathmapper = builder.pathmapper
         j.collect_outputs = partial(
             self.collect_output_ports, self.tool["outputs"], builder,
-            compute_checksum=kwargs.get("compute_checksum", True))
+            compute_checksum=kwargs.get("compute_checksum", True),
+            jobname=jobname,
+            readers=readers)
         j.output_callback = output_callbacks

         yield j

-    def collect_output_ports(self, ports, builder, outdir, compute_checksum=True):
-        # type: (Set[Dict[Text, Any]], Builder, Text, bool) -> Dict[Text, Union[Text, List[Any], Dict[Text, Any]]]
+    def collect_output_ports(self, ports, builder, outdir, compute_checksum=True, jobname="", readers=None):
+        # type: (Set[Dict[Text, Any]], Builder, Text, bool, Text, Dict[Text, Any]) -> Dict[Text, Union[Text, List[Any], Dict[Text, Any]]]
         ret = {}  # type: Dict[Text, Union[Text, List[Any], Dict[Text, Any]]]
         try:
-
             fs_access = builder.make_fs_access(outdir)
             custom_output = fs_access.join(outdir, "cwl.output.json")
             if fs_access.exists(custom_output):
@@ -429,14 +464,21 @@ def collect_output_ports(self, ports, builder, outdir, compute_checksum=True):
             visit_class(ret, ("File", "Directory"), cast(Callable[[Any], Any], revmap))
             visit_class(ret, ("File", "Directory"), remove_path)
             normalizeFilesDirs(ret)
+            adjustFileObjs(ret, builder.mutation_manager.set_generation)
             visit_class(ret, ("File", "Directory"), partial(check_valid_locations, fs_access))
+
             if compute_checksum:
                 adjustFileObjs(ret, partial(compute_checksums, fs_access))

-            validate.validate_ex(self.names.get_name("outputs_record_schema", ""), ret)
+            validate.validate_ex(self.names.get_name("outputs_record_schema", ""), ret,
+                                 strict=False, logger=_logger_validation_warnings)
             return ret if ret is not None else {}
         except validate.ValidationException as e:
             raise WorkflowException("Error validating output record. " + Text(e) + "\n in " + json.dumps(ret, indent=4))
+        finally:
+            if readers:
+                for r in readers.values():
+                    builder.mutation_manager.release_reader(jobname, r)

     def collect_output(self, schema, builder, outdir, fs_access, compute_checksum=True):
         # type: (Dict[Text, Any], Builder, Text, StdFsAccess, bool) -> Union[Dict[Text, Any], List[Union[Dict[Text, Any], Text]]]
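
Note on the new reader/writer bookkeeping: the added calls assume a builder.mutation_manager object exposing register_mutation, register_reader, release_reader, and set_generation, but the diff itself does not show that class. The following is only a minimal sketch of an interface consistent with those call sites; the internal bookkeeping, the conflict check, and the "_generation" field name are assumptions for illustration, not the actual cwltool implementation.

# Hypothetical sketch of a mutation manager matching the calls used above.
# Assumption: cwltool's real MutationManager may differ; this only illustrates
# the register_*/release_reader/set_generation bookkeeping implied by the diff.
class MutationManagerSketch(object):
    def __init__(self):
        self.generations = {}  # location -> current generation number (assumed)
        self.readers = {}      # location -> set of job names still reading

    def register_reader(self, jobname, obj):
        # A job declares that it will read this File/Directory object.
        self.readers.setdefault(obj["location"], set()).add(jobname)

    def release_reader(self, jobname, obj):
        # Called once the job's outputs have been collected (the finally block above).
        self.readers.get(obj["location"], set()).discard(jobname)

    def register_mutation(self, jobname, obj):
        # A job wants to update the file in place; refuse while other jobs still read it.
        loc = obj["location"]
        if self.readers.get(loc):
            raise RuntimeError(
                "%s cannot modify %s while it has readers: %s"
                % (jobname, loc, sorted(self.readers[loc])))
        self.generations[loc] = self.generations.get(loc, 0) + 1

    def set_generation(self, obj):
        # Stamp an output File object with the generation seen at collection time.
        obj["_generation"] = self.generations.get(obj["location"], 0)

Releasing readers in the finally block of collect_output_ports ensures the bookkeeping is cleaned up even when output validation fails, so a failed job cannot permanently block later in-place updates of its input files.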