From de6dd2f906624853b46af512e44eb93ffe79250e Mon Sep 17 00:00:00 2001
From: Antonio Gonzalez
Date: Wed, 26 Aug 2020 10:29:07 -0600
Subject: [PATCH 1/2] some general fixes/additions for next release

---
 README.rst                           |  6 ++----
 qiita_db/processing_job.py           | 25 +++++++++++++++++++++++++
 qiita_db/test/test_processing_job.py | 14 ++++++++++++--
 3 files changed, 39 insertions(+), 6 deletions(-)

diff --git a/README.rst b/README.rst
index e04fae360..e2ed9e94d 100644
--- a/README.rst
+++ b/README.rst
@@ -17,7 +17,7 @@ compute resources to the global community, alleviating the technical burdens,
 such as familiarity with the command line or access to compute power, that are
 typically limiting for researchers studying microbial ecology.
 
-Qiita is currently in alpha status. We are very open to community
+Qiita is currently in beta status. We are very open to community
 contributions and feedback. If you're interested in contributing to Qiita,
 see `CONTRIBUTING.md `__.
 If you'd like to report bugs or request features, you can do that in the
@@ -43,9 +43,7 @@ Current features
   * Target gene data: we support deblur against GreenGenes (13_8) and close
     reference picking against GreenGenes (13_8) and Silva.
 
-  * Metagenoic/Shotgun data: we support Shogun processing. Note that this data
-    is suitable for download and further down stream analyses but we don't recommend
-    meta-analysis within Qiita (only single study).
+  * Metagenomic and Metatranscriptomic data: we support Shogun processing.
 
 * biom files can be added as new preparation templates for downstream
   analyses; however, this cannot be made public.
diff --git a/qiita_db/processing_job.py b/qiita_db/processing_job.py
index 2bdd2d7c3..58de9a8ef 100644
--- a/qiita_db/processing_job.py
+++ b/qiita_db/processing_job.py
@@ -821,6 +821,31 @@ def external_id(self, value):
             qdb.sql_connection.TRN.add(sql, [value, self.id])
             qdb.sql_connection.TRN.execute()
 
+    @property
+    def release_validator_job(self):
+        """Retrieves the release validator job
+
+        Returns
+        -------
+        qiita_db.processing_job.ProcessingJob or None
+            The release validator job of this job
+        """
+        rvalidator = None
+        with qdb.sql_connection.TRN:
+            sql = """SELECT processing_job_id
+                     FROM qiita.processing_job
+                     WHERE command_id in (
+                        SELECT command_id
+                        FROM qiita.software_command
+                        WHERE name = 'release_validators')
+                     AND command_parameters->>'job' = %s"""
+            qdb.sql_connection.TRN.add(sql, [self.id])
+            results = qdb.sql_connection.TRN.execute_fetchflatten()
+            if results:
+                rvalidator = ProcessingJob(results[0])
+
+        return rvalidator
+
     def submit(self, parent_job_id=None, dependent_jobs_list=None):
         """Submits the job to execution
         This method has the ability to submit itself, as well as a list of
diff --git a/qiita_db/test/test_processing_job.py b/qiita_db/test/test_processing_job.py
index fe887ce8c..a3636b699 100644
--- a/qiita_db/test/test_processing_job.py
+++ b/qiita_db/test/test_processing_job.py
@@ -446,6 +446,10 @@ def test_complete_type(self):
             qdb.artifact.Artifact(exp_artifact_count).filepaths])
 
     def test_complete_success(self):
+        # Note that here we are submitting and creating multiple other jobs;
+        # thus this is the best place to test any intermediate steps/functions
+        # of the job creation, submission, execution, and completion.
+        #
         # This first part of the test is just to test that by default the
        # naming of the output artifact will be the name of the output
         fd, fp = mkstemp(suffix='_table.biom')
@@ -457,6 +461,10 @@ def test_complete_success(self):
                                         'artifact_type': 'BIOM'}}
         job = _create_job()
         job._set_status('running')
+
+        # here we can test that job.release_validator_job hasn't been created
+        # yet, so it has to be None
+        self.assertIsNone(job.release_validator_job)
         job.complete(True, artifacts_data=artifacts_data)
         self._wait_for_job(job)
         # Retrieve the job that is performing the validation:
@@ -464,6 +472,7 @@ def test_complete_success(self):
         self.assertEqual(len(validators), 1)
         # the validator actually runs on the system so it gets an external_id
         # assigned, let's test that is not None
+        self.assertFalse(validators[0].external_id == 'Not Available')
 
         # Test the output artifact is going to be named based on the
         # input parameters
@@ -858,8 +867,9 @@ def test_raise_if_not_in_construction_error(self):
         tester._raise_if_not_in_construction()
 
     def test_submit(self):
-        # In order to test a success, we need to actually run the jobs, which
-        # will mean to run split libraries, for example.
+        # The submit method is exercised in test_complete_success via a job,
+        # its release_validators job, and its validator submissions.
+        # Leaving this note here in case it's helpful for future development.
         pass
 
     def test_from_default_workflow(self):
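
The new ProcessingJob.release_validator_job property introduced in PATCH 1/2 can be exercised roughly as in the following sketch. This is an illustrative sketch only, not part of the patch: it assumes a configured Qiita environment with a populated database, and the job id shown is a placeholder.

# Illustrative sketch (not part of the patch): look up the release validator
# for an existing job. Assumes a configured Qiita environment; the job id
# below is a placeholder, not a real identifier.
from qiita_db.processing_job import ProcessingJob

job = ProcessingJob('00000000-0000-0000-0000-000000000000')  # placeholder id
rv_job = job.release_validator_job  # ProcessingJob or None

if rv_job is None:
    # no release_validators job has been created for this job yet
    print('no release validator job for %s' % job.id)
else:
    # the release_validators job stores the parent job id in its 'job'
    # parameter, which is what the property's SQL matches on
    print(rv_job.id, rv_job.parameters.values['job'])
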
From f608f3284c868542c678f14cec1e302484e88fe5 Mon Sep 17 00:00:00 2001
From: Antonio Gonzalez
Date: Thu, 27 Aug 2020 06:39:27 -0600
Subject: [PATCH 2/2] adding test for not None job.release_validator_job

---
 qiita_db/test/test_processing_job.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/qiita_db/test/test_processing_job.py b/qiita_db/test/test_processing_job.py
index a3636b699..28e89b150 100644
--- a/qiita_db/test/test_processing_job.py
+++ b/qiita_db/test/test_processing_job.py
@@ -467,12 +467,15 @@ def test_complete_success(self):
         self.assertIsNone(job.release_validator_job)
         job.complete(True, artifacts_data=artifacts_data)
         self._wait_for_job(job)
+        # let's check for the job that released the validators
+        self.assertIsNotNone(job.release_validator_job)
+        self.assertEqual(job.release_validator_job.parameters.values['job'],
+                         job.id)
         # Retrieve the job that is performing the validation:
         validators = list(job.validator_jobs)
         self.assertEqual(len(validators), 1)
         # the validator actually runs on the system so it gets an external_id
         # assigned, let's test that is not None
-        self.assertFalse(validators[0].external_id == 'Not Available')
 
         # Test the output artifact is going to be named based on the
         # input parameters
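
Taken together, the two patches assert the lifecycle around release validators: no release_validators job exists before completion, and after a successful complete() the parent job can reach both its release_validators job and its validator jobs. The condensed sketch below mirrors the updated test_complete_success; it is illustrative only and assumes the existing helpers in qiita_db/test/test_processing_job.py (_create_job, self._wait_for_job) plus the artifacts_data dictionary built earlier in that test.

# Condensed, illustrative sketch of the flow the updated test asserts; it is
# not meant to replace test_complete_success and assumes the existing test
# helpers (_create_job, self._wait_for_job) and an artifacts_data dict.
def _sketch_release_validator_flow(self, artifacts_data):
    job = _create_job()
    job._set_status('running')

    # before completion, no release_validators job exists for this job
    self.assertIsNone(job.release_validator_job)

    job.complete(True, artifacts_data=artifacts_data)
    self._wait_for_job(job)

    # completing the job spawns a release_validators job that points back at
    # this job through its 'job' parameter
    rv_job = job.release_validator_job
    self.assertIsNotNone(rv_job)
    self.assertEqual(rv_job.parameters.values['job'], job.id)

    # the validator job itself is reachable from the parent job
    self.assertEqual(len(list(job.validator_jobs)), 1)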