diff --git a/.travis.yml b/.travis.yml index 445b48ba8..99a5b5942 100644 --- a/.travis.yml +++ b/.travis.yml @@ -41,19 +41,17 @@ install: - mkdir ~/.qiita_plugins - cp $PWD/qiita_core/support_files/BIOM\ type_2.1.4.conf ~/.qiita_plugins before_script: - # Some of the tests rely on the plugin system to complete successfuly. - # Thus, we need a qiita webserver running to be able to execute the tests. - - export MOI_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg # EBI, see the end of before_install about why this block is commented out # - if [ ${TRAVIS_PULL_REQUEST} == "false" ]; then # export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test_travis.cfg; - # export MOI_CONFIG_FP=`pwd`/qiita_core/support_files/config_test_travis.cfg; # fi - qiita-env make --no-load-ontologies - qiita-test-install script: - - sleep 5 + # Some of the tests rely on the plugin system to complete successfuly. + # Thus, we need a qiita webserver running to be able to execute the tests. - qiita pet webserver --no-build-docs start & + - sleep 5 - QIITA_PID=$! 
- nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE - kill $QIITA_PID diff --git a/qiita_core/testing.py b/qiita_core/testing.py index c85c2c235..e06c1692a 100644 --- a/qiita_core/testing.py +++ b/qiita_core/testing.py @@ -38,14 +38,7 @@ def wait_for_prep_information_job(prep_id, raise_if_none=True): if res is not None: payload = loads(res) job_id = payload['job_id'] - if payload['is_qiita_job']: - wait_for_processing_job(job_id) - else: - redis_info = loads(r_client.get(job_id)) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(job_id)) - sleep(0.5) + wait_for_processing_job(job_id) def wait_for_processing_job(job_id): diff --git a/qiita_db/commands.py b/qiita_db/commands.py index 00106dcf8..236375a67 100644 --- a/qiita_db/commands.py +++ b/qiita_db/commands.py @@ -9,8 +9,6 @@ from functools import partial from future import standard_library from json import loads -from sys import exc_info -import traceback import qiita_db as qdb @@ -270,27 +268,3 @@ def update_artifact_from_cmd(filepaths, filepath_types, artifact_id): qdb.sql_connection.TRN.execute() return artifact - - -def complete_job_cmd(job_id, payload): - """Completes the given job - - Parameters - ---------- - job_id : str - The job id - payload : str - JSON string with the payload to complete the job - """ - payload = loads(payload) - if payload['success']: - artifacts = payload['artifacts'] - error = None - else: - artifacts = None - error = payload['error'] - job = qdb.processing_job.ProcessingJob(job_id) - try: - job.complete(payload['success'], artifacts, error) - except: - job._set_error(traceback.format_exception(*exc_info())) diff --git a/qiita_db/environment_manager.py b/qiita_db/environment_manager.py index b6981f045..9881c3b4a 100644 --- a/qiita_db/environment_manager.py +++ b/qiita_db/environment_manager.py @@ -17,7 +17,7 @@ from future.utils import viewitems from qiita_core.exceptions import 
QiitaEnvironmentError -from qiita_core.qiita_settings import qiita_config +from qiita_core.qiita_settings import qiita_config, r_client import qiita_db as qdb @@ -318,6 +318,8 @@ def drop_and_rebuild_tst_database(): qdb.sql_connection.TRN.execute() + r_client.flushdb() + def reset_test_database(wrapped_fn): """Decorator that drops the qiita schema, rebuilds and repopulates the diff --git a/qiita_db/handlers/processing_job.py b/qiita_db/handlers/processing_job.py index 84efc21af..41fbe45cb 100644 --- a/qiita_db/handlers/processing_job.py +++ b/qiita_db/handlers/processing_job.py @@ -7,7 +7,6 @@ # ----------------------------------------------------------------------------- from json import loads -from multiprocessing import Process from tornado.web import HTTPError @@ -46,26 +45,6 @@ def _get_job(job_id): return job -def _job_completer(job_id, payload): - """Completes a job - - Parameters - ---------- - job_id : str - The job to complete - payload : str - The JSON string with the parameters of the HTTP POST request that is - completing the job - """ - import qiita_db as qdb - - success, error = qdb.processing_job.private_job_submitter( - "Complete job %s" % job_id, 'complete_job', [job_id, payload]) - if not success: - job = qdb.processing_job.ProcessingJob(job_id) - job.complete(False, error=error) - - class JobHandler(OauthBaseHandler): @authenticate_oauth def get(self, job_id): @@ -158,10 +137,14 @@ def post(self, job_id): raise HTTPError( 403, "Can't complete job: not in a running state") - p = Process(target=_job_completer, - args=(job_id, self.request.body)) - p.start() - # safe_submit(job.user.email, _job_completer, job_id, payload) + qiita_plugin = qdb.software.Software.from_name_and_version( + 'Qiita', 'alpha') + cmd = qiita_plugin.get_command('complete_job') + params = qdb.software.Parameters.load( + cmd, values_dict={'job_id': job_id, + 'payload': self.request.body}) + job = qdb.processing_job.ProcessingJob.create(job.user, params) + job.submit() 
self.finish() diff --git a/qiita_db/processing_job.py b/qiita_db/processing_job.py index 86453d4f5..77ed78d30 100644 --- a/qiita_db/processing_job.py +++ b/qiita_db/processing_job.py @@ -53,45 +53,23 @@ def _system_call(cmd): return stdout, stderr, return_value -def _job_submitter(job, cmd): +def _job_submitter(job_id, cmd): """Executes the commands `cmd` and updates the job in case of failure Parameters ---------- - job : qiita_db.processing_job.ProcesingJob - The job that is executed by cmd + job_id : str + The job id that is executed by cmd cmd : str The command to execute the job """ std_out, std_err, return_value = _system_call(cmd) if return_value != 0: - error = ("Error submitting job '%s':\nStd output:%s\nStd error:%s" - % (job.id, std_out, std_err)) - job.complete(False, error=error) - - -def private_job_submitter(job_name, command, args): - """Submits a private job - - Parameters - ---------- - job_name : str - The name of the job - command: str - The private command to be executed - args: list of str - The arguments to the private command - """ - - cmd = "%s '%s' %s %s" % (qiita_config.private_launcher, - qiita_config.qiita_env, command, - ' '.join("'%s'" % a for a in args)) - std_out, std_err, return_value = _system_call(cmd) - error = "" - if return_value != 0: - error = ("Can't submit private task '%s':\n" - "Std output:%s\nStd error: %s" % (command, std_out, std_err)) - return (return_value == 0), error + error = ("Error submitting job:\nStd output:%s\nStd error:%s" + % (std_out, std_err)) + # Forcing the creation of a new connection + qdb.sql_connection.create_new_transaction() + ProcessingJob(job_id).complete(False, error=error) class ProcessingJob(qdb.base.QiitaObject): @@ -347,14 +325,19 @@ def submit(self): QiitaDBOperationNotPermittedError If the job is not in 'waiting' or 'in_construction' status """ - status = self.status - if status not in {'in_construction', 'waiting'}: - raise qdb.exceptions.QiitaDBOperationNotPermittedError( - "Can't 
submit job, not in 'in_construction' or " - "'waiting' status. Current status: %s" % status) - self._set_status('queued') + with qdb.sql_connection.TRN: + status = self.status + if status not in {'in_construction', 'waiting'}: + raise qdb.exceptions.QiitaDBOperationNotPermittedError( + "Can't submit job, not in 'in_construction' or " + "'waiting' status. Current status: %s" % status) + self._set_status('queued') + # At this point we are going to involve other processes. We need + # to commit the changes to the DB or the other processes will not + # see these changes + qdb.sql_connection.TRN.commit() cmd = self._generate_cmd() - p = Process(target=_job_submitter, args=(self, cmd)) + p = Process(target=_job_submitter, args=(self.id, cmd)) p.start() def release(self): diff --git a/qiita_db/sql_connection.py b/qiita_db/sql_connection.py index 38648329c..a4ebed078 100644 --- a/qiita_db/sql_connection.py +++ b/qiita_db/sql_connection.py @@ -646,9 +646,12 @@ def _raise_execution_error(self, sql, sql_args, error): """ self.rollback() - raise ValueError( - "Error running SQL: %s. MSG: %s\n" % ( - errorcodes.lookup(error.pgcode), error.message)) + try: + ec_lu = errorcodes.lookup(error.pgcode) + raise ValueError( + "Error running SQL: %s. 
MSG: %s\n" % (ec_lu, error.message)) + except KeyError: + raise ValueError("Error running SQL query: %s" % error.message) @_checker def add(self, sql, sql_args=None, many=False): @@ -882,11 +885,13 @@ def rollback(self): # Reset the queries, the results and the index self._queries = [] self._results = [] - try: - self._connection.rollback() - except Exception: - self._connection.close() - raise + + if self._connection is not None and self._connection.closed == 0: + try: + self._connection.rollback() + except Exception: + self._connection.close() + raise # Execute the post rollback functions self._funcs_executor(self._post_rollback_funcs, "rollback") @@ -937,3 +942,12 @@ def add_post_rollback_func(self, func, *args, **kwargs): # Singleton pattern, create the transaction for the entire system TRN = Transaction() + + +def create_new_transaction(): + """Creates a new global transaction + + This is needed when using multiprocessing + """ + global TRN + TRN = Transaction() diff --git a/qiita_db/support_files/patches/python_patches/58.py b/qiita_db/support_files/patches/python_patches/58.py index aed773c90..79c5f1dc7 100644 --- a/qiita_db/support_files/patches/python_patches/58.py +++ b/qiita_db/support_files/patches/python_patches/58.py @@ -14,24 +14,24 @@ qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') # Create the submit command for VAMPS command - parameters = {'artifact': ['artifact:["Demultiplexed"]', None]} + parameters = {'artifact': ['integer', None]} Command.create(qiita_plugin, "submit_to_VAMPS", "submits an artifact to VAMPS", parameters) # Create the copy artifact command - parameters = {'artifact': ['artifact:["Demultiplexed"]', None], + parameters = {'artifact': ['integer', None], 'prep_template': ['prep_template', None]} Command.create(qiita_plugin, "copy_artifact", "Creates a copy of an artifact", parameters) # Create the submit command for EBI command - parameters = {'artifact': ['artifact:["Demultiplexed"]', None], + parameters = 
{'artifact': ['integer', None], 'submission_type': ['choice:["ADD", "MODIFY"]', 'ADD']} Command.create(qiita_plugin, "submit_to_EBI", "submits an artifact to EBI", parameters) # Create the submit command for delete_artifact - parameters = {'artifact': ['artifact:["Demultiplexed"]', None]} + parameters = {'artifact': ['integer', None]} Command.create(qiita_plugin, "delete_artifact", "Delete an artifact", parameters) @@ -41,3 +41,34 @@ 'is_mapping_file': ['boolean', True], 'data_type': ['string', None]} Command.create(qiita_plugin, "create_sample_template", "Create a sample template", parameters) + + # Create the update sample template command + parameters = {'study': ['integer', None], 'template_fp': ['string', None]} + Command.create(qiita_plugin, "update_sample_template", + "Updates the sample template", parameters) + + # Create the delete sample template command + parameters = {'study': ['integer', None]} + Command.create(qiita_plugin, "delete_sample_template", + "Deletes a sample template", parameters) + + # Create the update prep template command + parameters = {'prep_template': ['integer', None], + 'template_fp': ['string', None]} + Command.create(qiita_plugin, "update_prep_template", + "Updates the prep template", parameters) + + # Create the delete sample or column command + parameters = { + 'obj_class': ['choice:["SampleTemplate", "PrepTemplate"]', None], + 'obj_id': ['integer', None], + 'sample_or_col': ['choice:["samples", "columns"]', None], + 'name': ['string', None]} + Command.create(qiita_plugin, "delete_sample_or_column", + "Deletes a sample or a columns from the metadata", + parameters) + + # Create the command to complete a job + parameters = {'job_id': ['string', None], 'payload': ['string', None]} + Command.create(qiita_plugin, "complete_job", "Completes a given job", + parameters) diff --git a/qiita_db/test/test_commands.py b/qiita_db/test/test_commands.py index a479e43be..94d1b6a7f 100644 --- a/qiita_db/test/test_commands.py +++ 
b/qiita_db/test/test_commands.py @@ -15,10 +15,8 @@ from future import standard_library from functools import partial from operator import itemgetter -from json import dumps import pandas as pd -import numpy.testing as npt from qiita_core.util import qiita_test_checker @@ -216,7 +214,7 @@ def tearDown(self): def test_load_parameters_from_cmd_error(self): with self.assertRaises(qdb.exceptions.QiitaDBUnknownIDError): qdb.commands.load_parameters_from_cmd( - "test", self.fp, 20) + "test", self.fp, 20000) def test_load_parameters_from_cmd_error_format(self): with self.assertRaises(ValueError): @@ -404,84 +402,6 @@ def test_update_artifact_from_cmd(self): self.assertEqual(qdb.util.compute_checksum(obs[1]), exp) -@qiita_test_checker() -class TestCompleteJobCmd(TestCase): - def setUp(self): - self._clean_up_files = [] - - def tearDown(self): - for fp in self._clean_up_files: - if exists(fp): - remove(fp) - - def test_complete_success(self): - pt = npt.assert_warns( - qdb.exceptions.QiitaDBWarning, - qdb.metadata_template.prep_template.PrepTemplate.create, - pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), - qdb.study.Study(1), '16S') - job = qdb.processing_job.ProcessingJob.create( - qdb.user.User('test@foo.bar'), - qdb.software.Parameters.load( - qdb.software.Command.get_validator('BIOM'), - values_dict={'template': pt.id, 'files': - dumps({'BIOM': ['file']}), - 'artifact_type': 'BIOM'})) - job._set_status('running') - - fd, fp = mkstemp(suffix='_table.biom') - close(fd) - with open(fp, 'w') as f: - f.write('\n') - - self._clean_up_files.append(fp) - - exp_artifact_count = qdb.util.get_count('qiita.artifact') + 1 - payload = dumps( - {'success': True, 'error': '', - 'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')], - 'artifact_type': 'BIOM'}}}) - qdb.commands.complete_job_cmd(job.id, payload) - self.assertEqual(job.status, 'success') - self.assertEqual(qdb.util.get_count('qiita.artifact'), - exp_artifact_count) - - def test_complete_job_error(self): - payload = 
dumps({'success': False, 'error': 'Job failure'}) - qdb.commands.complete_job_cmd( - 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d', payload) - job = qdb.processing_job.ProcessingJob( - 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d') - self.assertEqual(job.status, 'error') - self.assertEqual(job.log, - qdb.logger.LogEntry.newest_records(numrecords=1)[0]) - self.assertEqual(job.log.msg, 'Job failure') - - def test_complete_error(self): - pt = npt.assert_warns( - qdb.exceptions.QiitaDBWarning, - qdb.metadata_template.prep_template.PrepTemplate.create, - pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), - qdb.study.Study(1), '16S') - job = qdb.processing_job.ProcessingJob.create( - qdb.user.User('test@foo.bar'), - qdb.software.Parameters.load( - qdb.software.Command.get_validator('BIOM'), - values_dict={'template': pt.id, 'files': - dumps({'BIOM': ['file']}), - 'artifact_type': 'BIOM'})) - job._set_status('running') - - fp = '/surprised/if/this/path/exists.biom' - payload = dumps( - {'success': True, 'error': '', - 'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')], - 'artifact_type': 'BIOM'}}}) - qdb.commands.complete_job_cmd(job.id, payload) - self.assertEqual(job.status, 'error') - self.assertIn('No such file or directory', job.log.msg) - - CONFIG_1 = """[required] timeseries_type_id = 1 metadata_complete = True diff --git a/qiita_db/test/test_processing_job.py b/qiita_db/test/test_processing_job.py index 84fb9ef4f..80a53fa4a 100644 --- a/qiita_db/test/test_processing_job.py +++ b/qiita_db/test/test_processing_job.py @@ -66,17 +66,17 @@ def test_job_submitter(self): # it doesn't raise an error job = _create_job() cmd = 'echo "Test system call stdout"' - qdb.processing_job._job_submitter(job, cmd) + qdb.processing_job._job_submitter(job.id, cmd) def test_job_submitter_error(self): # Same comment as above, but here we are going to force failure, and # check that the job is updated correctly job = _create_job() cmd = '>&2 echo "Test system call stderr"; exit 1' - 
qdb.processing_job._job_submitter(job, cmd) + qdb.processing_job._job_submitter(job.id, cmd) self.assertEqual(job.status, 'error') - exp = ("Error submitting job '%s':\nStd output:\nStd error:" - "Test system call stderr\n" % job.id) + exp = ("Error submitting job:\nStd output:\nStd error:" + "Test system call stderr\n") self.assertEqual(job.log.msg, exp) diff --git a/qiita_pet/handlers/api_proxy/prep_template.py b/qiita_pet/handlers/api_proxy/prep_template.py index 7946617ff..2891243f8 100644 --- a/qiita_pet/handlers/api_proxy/prep_template.py +++ b/qiita_pet/handlers/api_proxy/prep_template.py @@ -10,15 +10,13 @@ from os import remove from os.path import basename from json import loads, dumps +from collections import defaultdict from natsort import natsorted from qiita_core.util import execute_as_transaction from qiita_core.qiita_settings import r_client from qiita_pet.handlers.api_proxy.util import check_access, check_fp -from qiita_ware.context import safe_submit -from qiita_ware.dispatchable import ( - update_prep_template, delete_sample_or_column) from qiita_db.metadata_template.util import load_template_to_dataframe from qiita_db.util import convert_to_id, get_files_from_uploads_folders from qiita_db.study import Study @@ -26,6 +24,7 @@ from qiita_db.ontology import Ontology from qiita_db.metadata_template.prep_template import PrepTemplate from qiita_db.processing_job import ProcessingJob +from qiita_db.software import Software, Parameters PREP_TEMPLATE_KEY_FORMAT = 'prep_template_%s' @@ -109,51 +108,26 @@ def prep_template_ajax_get_req(user_id, prep_id): name = "Prep information %d" % prep_id pt = PrepTemplate(prep_id) + # Initialize variables here + processing = False + alert_type = '' + alert_msg = '' job_info = r_client.get(PREP_TEMPLATE_KEY_FORMAT % prep_id) if job_info: - job_info = loads(job_info) + job_info = defaultdict(lambda: '', loads(job_info)) job_id = job_info['job_id'] - if job_id: - if job_info['is_qiita_job']: - job = ProcessingJob(job_id) 
- processing = job.status in ('queued', 'running') - success = job.status == 'success' - alert_type = 'info' if processing or success else 'danger' - alert_msg = (job.log.msg.replace('\n', '
') - if job.log is not None else "") - else: - alert_type = 'info' - alert_msg = '' - # this is not actually necessary but in case of a system - # failure this will avoid the error - ji = r_client.get(job_id) - if ji: - redis_info = loads(ji) - processing = redis_info['status_msg'] == 'Running' - success = redis_info['status_msg'] == 'Success' - if redis_info['return'] is not None: - alert_type = redis_info['return']['status'] - alert_msg = redis_info['return']['message'].replace( - '\n', '
') - - if processing: - alert_type = 'info' - alert_msg = 'This prep template is currently being updated' - elif success: - payload = {'job_id': None, - 'status': alert_type, - 'message': alert_msg, - 'is_qiita_job': job_info['is_qiita_job']} - r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep_id, - dumps(payload)) + job = ProcessingJob(job_id) + job_status = job.status + processing = job_status not in ('success', 'error') + if processing: + alert_type = 'info' + alert_msg = 'This prep template is currently being updated' + elif job_status == 'error': + alert_type = 'danger' + alert_msg = job.log.msg.replace('\n', '
') else: - processing = False - alert_type = job_info['status'] - alert_msg = job_info['message'].replace('\n', '
') - else: - processing = False - alert_type = '' - alert_msg = '' + alert_type = job_info['alert_type'] + alert_msg = job_info['alert_msg'].replace('\n', '
') artifact_attached = pt.artifact is not None study_id = pt.study_id @@ -456,9 +430,15 @@ def prep_template_patch_req(user_id, req_op, req_path, req_value=None, if fp['status'] != 'success': return fp fp = fp['file'] - job_id = safe_submit(user_id, update_prep_template, prep_id, fp) + qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') + cmd = qiita_plugin.get_command('update_prep_template') + params = Parameters.load( + cmd, values_dict={'prep_template': prep_id, 'template_fp': fp}) + job = ProcessingJob.create(User(user_id), params) + r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep_id, - dumps({'job_id': job_id, 'is_qiita_job': False})) + dumps({'job_id': job.id})) + job.submit() else: # We don't understand the attribute so return an error return {'status': 'error', @@ -483,12 +463,18 @@ def prep_template_patch_req(user_id, req_op, req_path, req_value=None, if access_error: return access_error - # Offload the deletion of the column to the cluster - job_id = safe_submit(user_id, delete_sample_or_column, PrepTemplate, - prep_id, attribute, attr_id) + qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') + cmd = qiita_plugin.get_command('delete_sample_or_column') + params = Parameters.load( + cmd, values_dict={'obj_class': 'PrepTemplate', + 'obj_id': prep_id, + 'sample_or_col': attribute, + 'name': attr_id}) + job = ProcessingJob.create(User(user_id), params) # Store the job id attaching it to the sample template id r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep_id, - dumps({'job_id': job_id, 'is_qiita_job': False})) + dumps({'job_id': job.id})) + job.submit() return {'status': 'success', 'message': '', 'row_id': row_id} else: return {'status': 'error', @@ -559,7 +545,7 @@ def prep_template_delete_req(prep_id, user_id): try: PrepTemplate.delete(prep.id) except Exception as e: - msg = ("Couldn't remove prep template: %s" % str(e)) + msg = str(e) status = 'error' return {'status': status, diff --git a/qiita_pet/handlers/api_proxy/sample_template.py 
b/qiita_pet/handlers/api_proxy/sample_template.py index 08f4605b0..896af1fdb 100644 --- a/qiita_pet/handlers/api_proxy/sample_template.py +++ b/qiita_pet/handlers/api_proxy/sample_template.py @@ -7,6 +7,7 @@ # ----------------------------------------------------------------------------- from __future__ import division from json import loads, dumps +from collections import defaultdict from natsort import natsorted @@ -20,10 +21,6 @@ from qiita_db.user import User from qiita_db.software import Software, Parameters from qiita_db.processing_job import ProcessingJob -from qiita_ware.dispatchable import ( - update_sample_template, delete_sample_template, - delete_sample_or_column) -from qiita_ware.context import safe_submit from qiita_pet.handlers.api_proxy.util import check_access, check_fp SAMPLE_TEMPLATE_KEY_FORMAT = 'sample_template_%s' @@ -193,44 +190,26 @@ def sample_template_category_get_req(category, samp_id, user_id): def get_sample_template_processing_status(st_id): + # Initialize variables here + processing = False + alert_type = '' + alert_msg = '' job_info = r_client.get(SAMPLE_TEMPLATE_KEY_FORMAT % st_id) if job_info: - job_info = loads(job_info) + job_info = defaultdict(lambda: '', loads(job_info)) job_id = job_info['job_id'] - if job_id: - redis_info = r_client.get(job_id) - if redis_info: - redis_info = loads(redis_info) - processing = redis_info['status_msg'] == 'Running' - if processing: - alert_type = 'info' - alert_msg = ('This sample template is currently being ' - 'processed') - elif redis_info['status_msg'] == 'Success': - alert_type = redis_info['return']['status'] - alert_msg = redis_info['return']['message'].replace( - '\n', '
') - payload = {'job_id': None, - 'status': alert_type, - 'message': alert_msg} - r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % st_id, - dumps(payload)) - else: - alert_type = redis_info['return']['status'] - alert_msg = redis_info['return']['message'].replace( - '\n', '
') - else: - processing = False - alert_type = '' - alert_msg = '' + job = ProcessingJob(job_id) + job_status = job.status + processing = job_status not in ('success', 'error') + if processing: + alert_type = 'info' + alert_msg = 'This sample template is currently being processed' + elif job_status == 'error': + alert_type = 'danger' + alert_msg = job.log.msg.replace('\n', '
') else: - processing = False - alert_type = job_info['status'] - alert_msg = job_info['message'].replace('\n', '
') - else: - processing = False - alert_type = '' - alert_msg = '' + alert_type = job_info['alert_type'] + alert_msg = job_info['alert_msg'].replace('\n', '
') return processing, alert_type, alert_msg @@ -361,7 +340,7 @@ def sample_template_post_req(study_id, user_id, data_type, job = ProcessingJob.create(User(user_id), params) r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id, - dumps({'job_id': job.id, 'is_qiita_job': True})) + dumps({'job_id': job.id})) # Store the job id attaching it to the sample template id job.submit() @@ -410,11 +389,17 @@ def sample_template_put_req(study_id, user_id, sample_template): status = 'success' # Offload the update of the sample template to the cluster - job_id = safe_submit(user_id, update_sample_template, int(study_id), - fp_rsp) + qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') + cmd = qiita_plugin.get_command('update_sample_template') + params = Parameters.load(cmd, values_dict={'study': int(study_id), + 'template_fp': fp_rsp}) + job = ProcessingJob.create(User(user_id), params) + # Store the job id attaching it to the sample template id r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id, - dumps({'job_id': job_id})) + dumps({'job_id': job.id})) + + job.submit() return {'status': status, 'message': msg, @@ -449,11 +434,16 @@ def sample_template_delete_req(study_id, user_id): if access_error: return access_error - # Offload the deletion of the sample template to the cluster - job_id = safe_submit(user_id, delete_sample_template, int(study_id)) + qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') + cmd = qiita_plugin.get_command('delete_sample_template') + params = Parameters.load(cmd, values_dict={'study': int(study_id)}) + job = ProcessingJob.create(User(user_id), params) + # Store the job id attaching it to the sample template id r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id, - dumps({'job_id': job_id})) + dumps({'job_id': job.id})) + + job.submit() return {'status': 'success', 'message': ''} @@ -545,12 +535,19 @@ def sample_template_patch_request(user_id, req_op, req_path, req_value=None, if access_error: return access_error - # Offload the 
deletion of the sample or column to the cluster - job_id = safe_submit(user_id, delete_sample_or_column, SampleTemplate, - int(st_id), attribute, attr_id) + qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') + cmd = qiita_plugin.get_command('delete_sample_or_column') + params = Parameters.load( + cmd, values_dict={'obj_class': 'SampleTemplate', + 'obj_id': int(st_id), 'sample_or_col': attribute, + 'name': attr_id}) + job = ProcessingJob.create(User(user_id), params) + # Store the job id attaching it to the sample template id r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % st_id, - dumps({'job_id': job_id})) + dumps({'job_id': job.id})) + + job.submit() return {'status': 'success', 'message': '', 'row_id': row_id} diff --git a/qiita_pet/handlers/api_proxy/tests/test_prep_template.py b/qiita_pet/handlers/api_proxy/tests/test_prep_template.py index 3ec3511dc..e0596603f 100644 --- a/qiita_pet/handlers/api_proxy/tests/test_prep_template.py +++ b/qiita_pet/handlers/api_proxy/tests/test_prep_template.py @@ -10,7 +10,6 @@ from os.path import join, exists from string import ascii_letters from random import choice -from time import sleep from json import loads import pandas as pd @@ -18,6 +17,7 @@ from qiita_core.util import qiita_test_checker from qiita_core.qiita_settings import r_client +from qiita_core.testing import wait_for_processing_job from qiita_db.artifact import Artifact from qiita_db.metadata_template.prep_template import PrepTemplate from qiita_db.ontology import Ontology @@ -288,10 +288,7 @@ def _wait_for_parallel_job(self, key): # so we need to make sure that all processes are done before we reset # the test database obs = r_client.get(key) - redis_info = loads(r_client.get(loads(obs)['job_id'])) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(loads(obs)['job_id'])) + wait_for_processing_job(loads(obs)['job_id']) def test_prep_template_graph_get_req(self): obs = prep_template_graph_get_req(1, 
'test@foo.bar') @@ -527,9 +524,8 @@ def test_prep_template_delete_req(self): def test_prep_template_delete_req_attached_artifact(self): obs = prep_template_delete_req(1, 'test@foo.bar') exp = {'status': 'error', - 'message': "Couldn't remove prep template: Cannot remove prep " - "template 1 because it has an artifact associated " - "with it"} + 'message': "Cannot remove prep template 1 because it has an " + "artifact associated with it"} self.assertEqual(obs, exp) def test_prep_template_delete_req_no_access(self): diff --git a/qiita_pet/handlers/api_proxy/tests/test_sample_template.py b/qiita_pet/handlers/api_proxy/tests/test_sample_template.py index 0cdad1d2f..9c58568b5 100644 --- a/qiita_pet/handlers/api_proxy/tests/test_sample_template.py +++ b/qiita_pet/handlers/api_proxy/tests/test_sample_template.py @@ -8,11 +8,11 @@ from unittest import TestCase, main from os import remove, mkdir from os.path import join, exists -from time import sleep from json import loads, dumps from qiita_core.util import qiita_test_checker from qiita_core.qiita_settings import r_client +from qiita_core.testing import wait_for_processing_job import qiita_db as qdb from qiita_pet.handlers.api_proxy.sample_template import ( sample_template_summary_get_req, sample_template_post_req, @@ -140,17 +140,16 @@ def test_get_sample_template_processing_status(self): self.assertEqual(obs_at, "") self.assertEqual(obs_am, "") - # Without job id - r_client.set(key, dumps({'job_id': None, 'status': "success", - 'message': ""})) - obs_proc, obs_at, obs_am = get_sample_template_processing_status(1) - self.assertFalse(obs_proc) - self.assertEqual(obs_at, "success") - self.assertEqual(obs_am, "") - # With job id and processing - r_client.set(key, dumps({'job_id': "test_job_id"})) - r_client.set("test_job_id", dumps({'status_msg': 'Running'})) + qiita_plugin = qdb.software.Software.from_name_and_version('Qiita', + 'alpha') + cmd = qiita_plugin.get_command('update_sample_template') + params = 
qdb.software.Parameters.load( + cmd, values_dict={'study': 1, 'template_fp': 'ignored'}) + job = qdb.processing_job.ProcessingJob.create( + qdb.user.User('test@foo.bar'), params) + job._set_status('running') + r_client.set(key, dumps({'job_id': job.id})) obs_proc, obs_at, obs_am = get_sample_template_processing_status(1) self.assertTrue(obs_proc) self.assertEqual(obs_at, "info") @@ -158,37 +157,25 @@ def test_get_sample_template_processing_status(self): obs_am, "This sample template is currently being processed") # With job id and success - r_client.set(key, dumps({'job_id': "test_job_id"})) - r_client.set("test_job_id", - dumps({'status_msg': 'Success', - 'return': {'status': 'success', - 'message': 'Some\nwarning'}})) + job._set_status('success') + r_client.set(key, dumps({'job_id': job.id, 'alert_type': 'warning', + 'alert_msg': 'Some\nwarning'})) obs_proc, obs_at, obs_am = get_sample_template_processing_status(1) self.assertFalse(obs_proc) - self.assertEqual(obs_at, "success") + self.assertEqual(obs_at, "warning") self.assertEqual(obs_am, "Some
warning") - obs = loads(r_client.get(key)) - self.assertEqual(obs, {'job_id': None, 'status': 'success', - 'message': 'Some
warning'}) # With job and not success - r_client.set(key, dumps({'job_id': "test_job_id"})) - r_client.set("test_job_id", - dumps({'status_msg': 'Failed', - 'return': {'status': 'error', - 'message': 'Some\nerror'}})) + job = qdb.processing_job.ProcessingJob.create( + qdb.user.User('test@foo.bar'), params) + job._set_status('running') + job._set_error('Some\nerror') + r_client.set(key, dumps({'job_id': job.id})) obs_proc, obs_at, obs_am = get_sample_template_processing_status(1) self.assertFalse(obs_proc) - self.assertEqual(obs_at, "error") + self.assertEqual(obs_at, "danger") self.assertEqual(obs_am, "Some
error") - # With job expired - r_client.set(key, dumps({'job_id': "non_existent_job"})) - obs_proc, obs_at, obs_am = get_sample_template_processing_status(1) - self.assertFalse(obs_proc) - self.assertEqual(obs_at, "") - self.assertEqual(obs_am, "") - def test_sample_template_summary_get_req(self): obs = sample_template_summary_get_req(1, 'test@foo.bar') exp = { @@ -396,10 +383,7 @@ def test_sample_template_post_req(self): # This is needed so the clean up works - this is a distributed system # so we need to make sure that all processes are done before we reset # the test database - redis_info = loads(r_client.get(loads(obs)['job_id'])) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(loads(obs)['job_id'])) + wait_for_processing_job(loads(obs)['job_id']) def test_sample_template_post_req_no_access(self): obs = sample_template_post_req(1, 'demo@microbio.me', '16S', @@ -422,10 +406,7 @@ def test_sample_template_put_req(self): # This is needed so the clean up works - this is a distributed system # so we need to make sure that all processes are done before we reset # the test database - redis_info = loads(r_client.get(loads(obs)['job_id'])) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(loads(obs)['job_id'])) + wait_for_processing_job(loads(obs)['job_id']) def test_sample_template_put_req_no_access(self): obs = sample_template_put_req(1, 'demo@microbio.me', 'filepath') @@ -452,10 +433,7 @@ def test_sample_template_delete_req(self): # This is needed so the clean up works - this is a distributed system # so we need to make sure that all processes are done before we reset # the test database - redis_info = loads(r_client.get(loads(obs)['job_id'])) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(loads(obs)['job_id'])) + wait_for_processing_job(loads(obs)['job_id']) def test_sample_template_delete_req_no_access(self): obs = 
sample_template_delete_req(1, 'demo@microbio.me') @@ -552,10 +530,7 @@ def test_sample_template_patch_request(self): # the test database obs = r_client.get('sample_template_1') self.assertIsNotNone(obs) - redis_info = loads(r_client.get(loads(obs)['job_id'])) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(loads(obs)['job_id'])) + wait_for_processing_job(loads(obs)['job_id']) ST = qdb.metadata_template.sample_template.SampleTemplate self.assertNotIn("season_environment", ST(1).categories()) diff --git a/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py b/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py index cd4541bfc..c1e420201 100644 --- a/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py +++ b/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py @@ -286,8 +286,9 @@ def test_artifact_post_request(self): # Wait until the job is completed wait_for_prep_information_job(1) # Check that the delete function has been actually called - obs = r_client.get(loads(r_client.get('prep_template_1'))['job_id']) - self.assertIn('Cannot delete artifact 2', obs) + job = ProcessingJob(loads(r_client.get('prep_template_1'))['job_id']) + self.assertEqual(job.status, 'error') + self.assertIn('Cannot delete artifact 2', job.log.msg) def test_artifact_patch_request(self): a = Artifact(1) diff --git a/qiita_pet/handlers/study_handlers/tests/test_sample_template.py b/qiita_pet/handlers/study_handlers/tests/test_sample_template.py index 3cb150c14..926370656 100644 --- a/qiita_pet/handlers/study_handlers/tests/test_sample_template.py +++ b/qiita_pet/handlers/study_handlers/tests/test_sample_template.py @@ -7,10 +7,10 @@ # ----------------------------------------------------------------------------- from unittest import main from json import loads -from time import sleep -from qiita_core.qiita_settings import r_client +from qiita_core.qiita_settings import r_client +from qiita_core.testing 
import wait_for_processing_job from qiita_pet.test.tornado_test_base import TestHandlerBase from qiita_pet.handlers.study_handlers.sample_template import ( _build_sample_summary) @@ -71,11 +71,7 @@ def test_delete_sample_template(self): # Wait until the job has completed obs = r_client.get('sample_template_1') - self.assertIsNotNone(obs) - redis_info = loads(r_client.get(loads(obs)['job_id'])) - while redis_info['status_msg'] == 'Running': - sleep(0.5) - redis_info = loads(r_client.get(loads(obs)['job_id'])) + wait_for_processing_job(loads(obs)['job_id']) class TestSampleAJAXReadOnly(TestHandlerBase): diff --git a/qiita_pet/test/test_prep_template.py b/qiita_pet/test/test_prep_template.py index 3dab40dde..a6d1d9358 100644 --- a/qiita_pet/test/test_prep_template.py +++ b/qiita_pet/test/test_prep_template.py @@ -69,9 +69,8 @@ def test_delete(self): self.assertEqual(response.code, 200) exp = { "status": "error", - "message": "Couldn't remove prep template: Cannot remove prep " - "template 1 because it has an artifact associated " - "with it"} + "message": "Cannot remove prep template 1 because it has an " + "artifact associated with it"} self.assertEqual(loads(response.body), exp) diff --git a/qiita_pet/test/tornado_test_base.py b/qiita_pet/test/tornado_test_base.py index 430c96514..1ad32891a 100644 --- a/qiita_pet/test/tornado_test_base.py +++ b/qiita_pet/test/tornado_test_base.py @@ -18,6 +18,7 @@ from qiita_pet.handlers.base_handlers import BaseHandler from qiita_db.environment_manager import clean_test_environment from qiita_db.user import User +from qiita_core.qiita_settings import r_client class TestHandlerBase(AsyncHTTPTestCase): @@ -32,6 +33,7 @@ def get_app(self): @classmethod def tearDownClass(cls): clean_test_environment() + r_client.flushdb() # helpers from http://www.peterbe.com/plog/tricks-asynchttpclient-tornado def get(self, url, data=None, headers=None, doseq=True): diff --git a/qiita_ware/context.py b/qiita_ware/context.py deleted file mode 100644 
index 949ee51fb..000000000 --- a/qiita_ware/context.py +++ /dev/null @@ -1,178 +0,0 @@ -# ----------------------------------------------------------------------------- -# Copyright (c) 2014--, The Qiita Development Team. -# -# Distributed under the terms of the BSD 3-clause License. -# -# The full license is in the file LICENSE, distributed with this software. -# ----------------------------------------------------------------------------- - -from subprocess import Popen, PIPE -from uuid import uuid4 -from functools import partial -from time import sleep - -from qiita_core.qiita_settings import r_client - -from .exceptions import ComputeError - - -def system_call(cmd): - """Call cmd and return (stdout, stderr, return_value). - - cmd: can be either a string containing the command to be run, or a - sequence of strings that are the tokens of the command. - - This function is ported from QIIME (http://www.qiime.org), previously - named qiime_system_call. QIIME is a GPL project, but we obtained permission - from the authors of this function to port it to pyqi (and keep it under - pyqi's BSD license). - """ - proc = Popen(cmd, - universal_newlines=True, - shell=True, - stdout=PIPE, - stderr=PIPE) - # communicate pulls all stdout/stderr from the PIPEs to - # avoid blocking -- don't remove this line! - stdout, stderr = proc.communicate() - return_value = proc.returncode - - if return_value != 0: - raise ComputeError("Failed to execute: %s\nstdout: %s\nstderr: %s" % - (cmd, stdout, stderr)) - - return stdout, stderr, return_value - - -class Dispatch(object): - def __init__(self): - pass - - -def _redis_wrap(f, redis_deets, *args, **kwargs): - """Wrap something to compute, and notify about state - - At the end, sets the key job_id with the serialized payload result. The - payload consists of: - - {'job_id': uuid, - 'status_msg': {'Success', 'Failed'}, - 'return': up to f} - - The set key will expire in 7 days. 
- - The result is also pushed to the corresponding messages key in redis_deets, - as well as published on the corresponding pubsub key. - - Parameters - ---------- - f : function - A function to execute - redis_deets : dict - Redis details, specifically {'job_id': uuid, - 'pubsub': key to publish on, - 'messages': key to push messages to} - """ - def _deposit_payload(redis_deets, payload): - """Drop messages into redis - - This is being defined inline as we need to use it multiple times, and - for an undiagnosed reason, having this function call it as a first - class function does not work. - """ - from json import dumps - from qiita_core.qiita_settings import r_client - - job_id = redis_deets['job_id'] - pubsub = redis_deets['pubsub'] - messages = redis_deets['messages'] - - serialized = dumps(payload) - - # First, we need to push the message on to the messages queue which is - # in place in the event of a race-condition where a websocket client - # may not be already listening to the pubsub. - r_client.rpush(messages, serialized) - - # Next, in support of our "normal" and desired means of communicating, - # we "publish" our payload. 
Anyone listening on the pubsub will see - # this and fire an event (e.g., WebSocketHandler.callback) - r_client.publish(pubsub, serialized) - - # Finally, we dump the payload keyed by job ID so that subsequent - # handlers who are not listening on the channel can examine the results - r_client.set(job_id, serialized, ex=86400 * 7) # expire at 1 week - - job_id = redis_deets['job_id'] - payload = {'job_id': job_id, 'status_msg': 'Running', 'return': None} - - _deposit_payload(redis_deets, payload) - try: - payload['return'] = f(*args, **kwargs) - payload['status_msg'] = 'Success' - except Exception: - import sys - import traceback - payload['return'] = repr(traceback.format_exception(*sys.exc_info())) - payload['status_msg'] = 'Failed' - finally: - _deposit_payload(redis_deets, payload) - - -def _submit(ctx, channel, f, *args, **kwargs): - """Submit a function to a cluster - - The work is submitted to the context, and a UUID describing the job is - returned. On completion, regardless of success or fail, the status of the - job will be set in `r_client` under the key of the UUID, and additionally, - the UUID will be published to the channel 'qiita-compute-complete'. - - Parameters - ---------- - ctx : Dispatch - A Dispatch object to submit through - channel : str - channel to submit the run to - f : function - The function to execute. Any returns from this function will be - serialized and deposited into Redis using the uuid for a key. 
- args : tuple or None - Any args for ``f`` - kwargs : dict or None - Any kwargs for ``f`` - - Returns - ------- - uuid - The job ID - """ - uuid = str(uuid4()) - redis_deets = {'job_id': uuid, 'pubsub': channel, - 'messages': channel + ':messages'} - ctx.submit_async(_redis_wrap, f, redis_deets, *args, **kwargs) - return uuid - - -# likely want this in qiita_ware.__init__ -context = Dispatch() -submit = partial(_submit, context) - - -def safe_submit(*args, **kwargs): - """Safe wraper for the submit function - - There are cases in which a race condition may occur: submit returns the - job id but moi hasn't submitted the job. In some cases this is not - acceptable, so this wrapper makes sure that the job_id - is returned only once the job has already been submitted. - - From previous tests, the while loop is executed ~2 times, so there is not - much time lost in here - """ - job_id = submit(*args, **kwargs) - payload = r_client.get(job_id) - while not payload: - sleep(0.005) - payload = r_client.get(job_id) - - return job_id diff --git a/qiita_ware/dispatchable.py b/qiita_ware/dispatchable.py deleted file mode 100644 index bd5cc81aa..000000000 --- a/qiita_ware/dispatchable.py +++ /dev/null @@ -1,160 +0,0 @@ -# ----------------------------------------------------------------------------- -# Copyright (c) 2014--, The Qiita Development Team. -# -# Distributed under the terms of the BSD 3-clause License. -# -# The full license is in the file LICENSE, distributed with this software. 
-# ----------------------------------------------------------------------------- - - -def update_sample_template(study_id, fp): - """Updates a sample template - - Parameters - ---------- - study_id : int - Study id whose template is going to be updated - fp : str - The file path to the template file - - Returns - ------- - dict of {str: str} - A dict of the form {'status': str, 'message': str} - """ - import warnings - from os import remove - from qiita_db.metadata_template.util import load_template_to_dataframe - from qiita_db.metadata_template.sample_template import SampleTemplate - - msg = '' - status = 'success' - - try: - with warnings.catch_warnings(record=True) as warns: - # deleting previous uploads and inserting new one - st = SampleTemplate(study_id) - df = load_template_to_dataframe(fp) - st.extend_and_update(df) - remove(fp) - - # join all the warning messages into one. Note that this info - # will be ignored if an exception is raised - if warns: - msg = '\n'.join(set(str(w.message) for w in warns)) - status = 'warning' - except Exception as e: - status = 'danger' - msg = str(e) - - return {'status': status, 'message': msg} - - -def delete_sample_template(study_id): - """Delete a sample template - - Parameters - ---------- - study_id : int - Study id whose template is going to be deleted - - Returns - ------- - dict of {str: str} - A dict of the form {'status': str, 'message': str} - """ - from qiita_db.metadata_template.sample_template import SampleTemplate - - msg = '' - status = 'success' - try: - SampleTemplate.delete(study_id) - except Exception as e: - status = 'danger' - msg = str(e) - - return {'status': status, 'message': msg} - - -def update_prep_template(prep_id, fp): - """Updates a prep template - - Parameters - ---------- - prep_id : int - Prep template id to be updated - fp : str - The file path to the template file - - Returns - ------- - dict of {str: str} - A dict of the form {'status': str, 'message': str} - """ - import warnings - 
from os import remove - from qiita_db.metadata_template.util import load_template_to_dataframe - from qiita_db.metadata_template.prep_template import PrepTemplate - - msg = '' - status = 'success' - - prep = PrepTemplate(prep_id) - - try: - with warnings.catch_warnings(record=True) as warns: - df = load_template_to_dataframe(fp) - prep.extend(df) - prep.update(df) - remove(fp) - - if warns: - msg = '\n'.join(set(str(w.message) for w in warns)) - status = 'warning' - except Exception as e: - status = 'danger' - msg = str(e) - - return {'status': status, 'message': msg} - - -def delete_sample_or_column(obj_class, obj_id, sample_or_col, name): - """Deletes a sample or a column from the metadata - - Parameters - ---------- - obj_class : {SampleTemplate, PrepTemplate} - The metadata template subclass - obj_id : int - The template id - sample_or_col : {"samples", "columns"} - Which resource are we deleting. Either "samples" or "columns" - name : str - The name of the resource to be deleted - - Returns - ------- - dict of {str: str} - A dict of the form {'status': str, 'message': str} - """ - st = obj_class(obj_id) - - if sample_or_col == 'columns': - del_func = st.delete_column - elif sample_or_col == 'samples': - del_func = st.delete_sample - else: - return {'status': 'danger', - 'message': 'Unknown value "%s". Choose between "samples" ' - 'and "columns"' % sample_or_col} - - msg = '' - status = 'success' - - try: - del_func(name) - except Exception as e: - status = 'danger' - msg = str(e) - - return {'status': status, 'message': msg} diff --git a/qiita_ware/private_plugin.py b/qiita_ware/private_plugin.py index 94aed8600..cb51f7a92 100644 --- a/qiita_ware/private_plugin.py +++ b/qiita_ware/private_plugin.py @@ -6,13 +6,15 @@ # The full license is in the file LICENSE, distributed with this software. 
# ----------------------------------------------------------------------------- -from json import dumps +from json import dumps, loads from sys import exc_info from time import sleep -import traceback from os import remove +import traceback +import warnings import qiita_db as qdb +from qiita_core.qiita_settings import r_client from qiita_ware.commands import submit_VAMPS, submit_EBI from qiita_ware.metadata_pipeline import ( create_templates_from_qiime_mapping_file) @@ -83,8 +85,8 @@ def submit_to_VAMPS(job): job._set_status('success') -def copy_artifact(job): - """Creates a copy of an artifact +def submit_to_EBI(job): + """Submit a study to EBI Parameters ---------- @@ -93,15 +95,14 @@ def copy_artifact(job): """ with qdb.sql_connection.TRN: param_vals = job.parameters.values - orig_artifact = qdb.artifact.Artifact(param_vals['artifact']) - prep_template = qdb.metadata_template.prep_template.PrepTemplate( - param_vals['prep_template']) - qdb.artifact.Artifact.copy(orig_artifact, prep_template) + artifact_id = int(param_vals['artifact']) + submission_type = param_vals['submission_type'] + submit_EBI(artifact_id, submission_type, False) job._set_status('success') -def submit_to_EBI(job): - """Submit a study to EBI +def copy_artifact(job): + """Creates a copy of an artifact Parameters ---------- @@ -110,9 +111,10 @@ def submit_to_EBI(job): """ with qdb.sql_connection.TRN: param_vals = job.parameters.values - artifact_id = int(param_vals['artifact']) - submission_type = param_vals['submission_type'] - submit_EBI(artifact_id, submission_type, False) + orig_artifact = qdb.artifact.Artifact(param_vals['artifact']) + prep_template = qdb.metadata_template.prep_template.PrepTemplate( + param_vals['prep_template']) + qdb.artifact.Artifact.copy(orig_artifact, prep_template) job._set_status('success') @@ -145,26 +147,170 @@ def create_sample_template(job): is_mapping_file = params['is_mapping_file'] data_type = params['data_type'] - if is_mapping_file: - 
create_templates_from_qiime_mapping_file(fp, study, data_type) + with warnings.catch_warnings(record=True) as warns: + if is_mapping_file: + create_templates_from_qiime_mapping_file(fp, study, data_type) + else: + qdb.metadata_template.sample_template.SampleTemplate.create( + qdb.metadata_template.util.load_template_to_dataframe(fp), + study) + remove(fp) + + if warns: + msg = '\n'.join(set(str(w.message) for w in warns)) + r_client.set("sample_template_%s" % study.id, + dumps({'job_id': job.id, 'alert_type': 'warning', + 'alert_msg': msg})) + + job._set_status('success') + + +def update_sample_template(job): + """Updates a sample template + + Parameters + ---------- + job : qiita_db.processing_job.ProcessingJob + The processing job performing the task + """ + with qdb.sql_connection.TRN: + param_vals = job.parameters.values + study_id = param_vals['study'] + fp = param_vals['template_fp'] + with warnings.catch_warnings(record=True) as warns: + st = qdb.metadata_template.sample_template.SampleTemplate(study_id) + df = qdb.metadata_template.util.load_template_to_dataframe(fp) + st.extend_and_update(df) + remove(fp) + + # Join all the warning messages into one. 
Note that this info + # will be ignored if an exception is raised + if warns: + msg = '\n'.join(set(str(w.message) for w in warns)) + r_client.set("sample_template_%s" % study_id, + dumps({'job_id': job.id, 'alert_type': 'warning', + 'alert_msg': msg})) + + job._set_status('success') + + +def delete_sample_template(job): + """Deletes a sample template + + Parameters + ---------- + job : qiita_db.processing_job.ProcessingJob + The processing job performing the task + """ + with qdb.sql_connection.TRN: + qdb.metadata_template.sample_template.SampleTemplate.delete( + job.parameters.values['study']) + job._set_status('success') + + +def update_prep_template(job): + """Updates a prep template + + Parameters + ---------- + job : qiita_db.processing_job.ProcessingJob + The processing job performing the task + """ + with qdb.sql_connection.TRN: + param_vals = job.parameters.values + prep_id = param_vals['prep_template'] + fp = param_vals['template_fp'] + + prep = qdb.metadata_template.prep_template.PrepTemplate(prep_id) + with warnings.catch_warnings(record=True) as warns: + df = qdb.metadata_template.util.load_template_to_dataframe(fp) + prep.extend_and_update(df) + remove(fp) + + # Join all the warning messages into one. 
Note that this info + # will be ignored if an exception is raised + if warns: + msg = '\n'.join(set(str(w.message) for w in warns)) + r_client.set("prep_template_%s" % prep_id, + dumps({'job_id': job.id, 'alert_type': 'warning', + 'alert_msg': msg})) + + job._set_status('success') + + +def delete_sample_or_column(job): + """Deletes a sample or a column from the metadata + + Parameters + ---------- + job : qiita_db.processing_job.ProcessingJob + The processing job performing the task + """ + with qdb.sql_connection.TRN: + param_vals = job.parameters.values + obj_class = param_vals['obj_class'] + obj_id = param_vals['obj_id'] + sample_or_col = param_vals['sample_or_col'] + name = param_vals['name'] + + if obj_class == 'SampleTemplate': + constructor = qdb.metadata_template.sample_template.SampleTemplate + elif obj_class == 'PrepTemplate': + constructor = qdb.metadata_template.prep_template.PrepTemplate + else: + raise ValueError('Unknown value "%s". Choose between ' + '"SampleTemplate" and "PrepTemplate"' % obj_class) + + if sample_or_col == 'columns': + del_func = constructor(obj_id).delete_column + elif sample_or_col == 'samples': + del_func = constructor(obj_id).delete_sample + else: + raise ValueError('Unknown value "%s". 
Choose between "samples" ' + 'and "columns"' % sample_or_col) + + del_func(name) + job._set_status('success') + + +def complete_job(job): + """Completes the job described in the payload parameter + + Parameters + ---------- + job : qiita_db.processing_job.ProcessingJob + The processing job performing the task + """ + with qdb.sql_connection.TRN: + param_vals = job.parameters.values + payload = loads(param_vals['payload']) + if payload['success']: + artifacts = payload['artifacts'] + error = None else: - qdb.metadata_template.sample_template.SampleTemplate.create( - qdb.metadata_template.util.load_template_to_dataframe(fp), - study) - remove(fp) + artifacts = None + error = payload['error'] + c_job = qdb.processing_job.ProcessingJob(param_vals['job_id']) + try: + c_job.complete(payload['success'], artifacts, error) + except: + c_job._set_error(traceback.format_exception(*exc_info())) job._set_status('success') -TASK_DICT = { - 'build_analysis_files': build_analysis_files, - 'release_validators': release_validators, - 'submit_to_VAMPS': submit_to_VAMPS, - 'copy_artifact': copy_artifact, - 'submit_to_EBI': submit_to_EBI, - 'delete_artifact': delete_artifact, - 'create_sample_template': create_sample_template, -} +TASK_DICT = {'build_analysis_files': build_analysis_files, + 'release_validators': release_validators, + 'submit_to_VAMPS': submit_to_VAMPS, + 'submit_to_EBI': submit_to_EBI, + 'copy_artifact': copy_artifact, + 'delete_artifact': delete_artifact, + 'create_sample_template': create_sample_template, + 'update_sample_template': update_sample_template, + 'delete_sample_template': delete_sample_template, + 'update_prep_template': update_prep_template, + 'delete_sample_or_column': delete_sample_or_column, + 'complete_job': complete_job} def private_task(job_id): @@ -185,6 +331,8 @@ def private_task(job_id): try: TASK_DICT[task_name](job) - except Exception: - job.complete(False, error="Error executing private task: %s" - % traceback.format_exception(*exc_info())) + 
except Exception as e: + log_msg = "Error on job %s: %s" % ( + job.id, ''.join(traceback.format_exception(*exc_info()))) + le = qdb.logger.LogEntry.create('Runtime', log_msg) + job.complete(False, error="Error (log id: %d): %s" % (le.id, e)) diff --git a/qiita_ware/test/test_dispatchable.py b/qiita_ware/test/test_dispatchable.py deleted file mode 100644 index 5b0e53f8f..000000000 --- a/qiita_ware/test/test_dispatchable.py +++ /dev/null @@ -1,138 +0,0 @@ -# ----------------------------------------------------------------------------- -# Copyright (c) 2014--, The Qiita Development Team. -# -# Distributed under the terms of the BSD 3-clause License. -# -# The full license is in the file LICENSE, distributed with this software. -# ----------------------------------------------------------------------------- - -from unittest import TestCase, main -from tempfile import mkstemp -from os import close, remove -from os.path import exists - -import pandas as pd -import numpy.testing as npt - -from qiita_core.util import qiita_test_checker -from qiita_ware.dispatchable import ( - update_sample_template, delete_sample_template, - update_prep_template, delete_sample_or_column) -from qiita_db.study import Study -from qiita_db.exceptions import QiitaDBWarning -from qiita_db.metadata_template.prep_template import PrepTemplate -from qiita_db.metadata_template.sample_template import SampleTemplate - - -@qiita_test_checker() -class TestDispatchable(TestCase): - def setUp(self): - fd, self.fp = mkstemp(suffix=".txt") - close(fd) - with open(self.fp, 'w') as f: - f.write("sample_name\tnew_col\n" - "1.SKD6.640190\tnew_vale") - - self._clean_up_files = [self.fp] - - def tearDown(self): - for fp in self._clean_up_files: - if exists(fp): - remove(fp) - - def test_update_sample_template(self): - obs = update_sample_template(1, self.fp) - exp = {'status': 'warning', - 'message': ("Sample names were already prefixed with the study " - "id.\nThe following columns have been added to the " - 
"existing template: new_col\nThere are no " - "differences between the data stored in the DB and " - "the new data provided")} - self.assertItemsEqual(obs['message'].split('\n'), - exp['message'].split('\n')) - self.assertEqual(obs['status'], exp['status']) - - def test_delete_sample_template(self): - obs = delete_sample_template(1) - exp = {'status': 'danger', - 'message': 'Sample template cannot be erased because there ' - 'are prep templates associated.'} - self.assertEqual(obs, exp) - - def test_update_prep_template(self): - obs = update_prep_template(1, self.fp) - exp = {'status': 'warning', - 'message': 'Sample names were already prefixed with the study ' - 'id.\nThe following columns have been added to the ' - 'existing template: new_col\nThere are no ' - 'differences between the data stored in the DB and ' - 'the new data provided'} - self.assertItemsEqual(obs['message'].split('\n'), - exp['message'].split('\n')) - self.assertEqual(obs['status'], exp['status']) - - def test_delete_sample_or_column(self): - st = SampleTemplate(1) - - # Delete a sample template column - obs = delete_sample_or_column(SampleTemplate, 1, "columns", - "season_environment") - exp = {'status': "success", 'message': ""} - self.assertEqual(obs, exp) - self.assertNotIn('season_environment', st.categories()) - - # Delete a sample template sample - need to add one sample that we - # will remove - npt.assert_warns( - QiitaDBWarning, st.extend, - pd.DataFrame.from_dict({'Sample1': {'taxon_id': '9606'}}, - orient='index', dtype=str)) - self.assertIn('1.Sample1', st.keys()) - obs = delete_sample_or_column(SampleTemplate, 1, "samples", - "1.Sample1") - exp = {'status': "success", 'message': ""} - self.assertEqual(obs, exp) - self.assertNotIn('1.Sample1', st.keys()) - - # Delete a prep template column - pt = PrepTemplate(2) - - obs = delete_sample_or_column(PrepTemplate, 2, "columns", - "target_subfragment") - exp = {'status': "success", 'message': ""} - self.assertEqual(obs, exp) - 
self.assertNotIn('target_subfragment', pt.categories()) - - # Delte a prep template sample - metadata = pd.DataFrame.from_dict( - {'1.SKB8.640193': {'barcode': 'GTCCGCAAGTTA', - 'primer': 'GTGCCAGCMGCCGCGGTAA'}, - '1.SKD8.640184': {'barcode': 'CGTAGAGCTCTC', - 'primer': 'GTGCCAGCMGCCGCGGTAA'}}, - orient='index', dtype=str) - pt = npt.assert_warns(QiitaDBWarning, PrepTemplate.create, metadata, - Study(1), "16S") - obs = delete_sample_or_column(PrepTemplate, pt.id, "samples", - '1.SKD8.640184') - exp = {'status': "success", 'message': ""} - self.assertEqual(obs, exp) - self.assertNotIn('1.SKD8.640184', pt.categories()) - - # Exception - obs = delete_sample_or_column(PrepTemplate, 2, "samples", - "1.SKM9.640192") - exp = {'status': "danger", - 'message': "Prep info file '2' has files attached, you cannot " - "delete samples."} - self.assertEqual(obs, exp) - - # No "samples" or "columns" - obs = delete_sample_or_column(PrepTemplate, 2, "not_samples", "NOP") - exp = {'status': 'danger', - 'message': 'Unknown value "not_samples". 
Choose between ' - '"samples" and "columns"'} - self.assertEqual(obs, exp) - - -if __name__ == '__main__': - main() diff --git a/qiita_ware/test/test_private_plugin.py b/qiita_ware/test/test_private_plugin.py index 68c054088..82a72ad6d 100644 --- a/qiita_ware/test/test_private_plugin.py +++ b/qiita_ware/test/test_private_plugin.py @@ -7,20 +7,27 @@ # ----------------------------------------------------------------------------- from unittest import TestCase, main - -import pandas as pd -from os import close, remove from os.path import join, dirname, abspath, exists +from os import close, remove from tempfile import mkstemp +from json import loads, dumps + +import pandas as pd +import numpy.testing as npt from qiita_core.util import qiita_test_checker -from qiita_db.software import Software, Parameters +from qiita_core.qiita_settings import r_client +from qiita_db.software import Software, Parameters, Command from qiita_db.processing_job import ProcessingJob from qiita_db.user import User -from qiita_db.study import Study -from qiita_db.artifact import Artifact +from qiita_db.study import Study, StudyPerson +from qiita_db.metadata_template.sample_template import SampleTemplate from qiita_db.metadata_template.prep_template import PrepTemplate +from qiita_db.exceptions import QiitaDBWarning +from qiita_db.artifact import Artifact from qiita_db.exceptions import QiitaDBUnknownIDError +from qiita_db.util import get_count +from qiita_db.logger import LogEntry from qiita_ware.private_plugin import private_task @@ -40,10 +47,12 @@ def tearDown(self): if exists(fp): remove(fp) - def _create_job(self, cmd, values_dict): + r_client.flushdb() + + def _create_job(self, cmd_name, values_dict): user = User('test@foo.bar') qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') - cmd = qiita_plugin.get_command(cmd) + cmd = qiita_plugin.get_command(cmd_name) params = Parameters.load(cmd, values_dict=values_dict) job = ProcessingJob.create(user, params) 
job._set_status('queued') @@ -91,6 +100,7 @@ def test_delete_artifact(self): Artifact(3) def test_create_sample_template(self): + # Test error job = self._create_job('create_sample_template', { 'fp': self.fp, 'study_id': 1, 'is_mapping_file': False, 'data_type': None}) @@ -99,6 +109,33 @@ def test_create_sample_template(self): self.assertIn("The 'SampleTemplate' object with attributes (id: 1) " "already exists.", job.log.msg) + # Test success with a warning + info = {"timeseries_type_id": '1', + "metadata_complete": 'true', + "mixs_compliant": 'true', + "number_samples_collected": 25, + "number_samples_promised": 28, + "study_alias": "TDST", + "study_description": "Test create sample template", + "study_abstract": "Test create sample template", + "principal_investigator_id": StudyPerson(1)} + study = Study.create(User('test@foo.bar'), + "Create Sample Template test", info) + job = self._create_job('create_sample_template', + {'fp': self.fp, 'study_id': study.id, + 'is_mapping_file': False, 'data_type': None}) + private_task(job.id) + self.assertEqual(job.status, 'success') + obs = r_client.get("sample_template_%d" % study.id) + self.assertIsNotNone(obs) + obs = loads(obs) + self.assertItemsEqual(obs, ['job_id', 'alert_type', 'alert_msg']) + self.assertEqual(obs['job_id'], job.id) + self.assertEqual(obs['alert_type'], 'warning') + self.assertIn( + 'Some functionality will be disabled due to missing columns:', + obs['alert_msg']) + def test_create_sample_template_nonutf8(self): fp = join(dirname(abspath(__file__)), 'test_data', 'sample_info_utf8_error.txt') @@ -114,6 +151,229 @@ def test_create_sample_template_nonutf8(self): '🐾: "🐾collection_timestamp" = (0, 13)', job.log.msg) + def test_update_sample_template(self): + fd, fp = mkstemp(suffix=".txt") + close(fd) + with open(fp, 'w') as f: + f.write("sample_name\tnew_col\n1.SKD6.640190\tnew_value") + self._clean_up_files.append(fp) + + job = self._create_job('update_sample_template', + {'study': 1, 'template_fp': fp}) 
+ private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertEqual(SampleTemplate(1)['1.SKD6.640190']['new_col'], + 'new_value') + obs = r_client.get("sample_template_1") + self.assertIsNotNone(obs) + obs = loads(obs) + self.assertItemsEqual(obs, ['job_id', 'alert_type', 'alert_msg']) + self.assertEqual(obs['job_id'], job.id) + self.assertEqual(obs['alert_type'], 'warning') + self.assertIn('The following columns have been added to the existing ' + 'template: new_col', obs['alert_msg']) + + def test_delete_sample_template(self): + # Error case + job = self._create_job('delete_sample_template', {'study': 1}) + private_task(job.id) + self.assertEqual(job.status, 'error') + self.assertIn("Sample template cannot be erased because there are " + "prep templates associated", job.log.msg) + + # Success case + info = {"timeseries_type_id": '1', + "metadata_complete": 'true', + "mixs_compliant": 'true', + "number_samples_collected": 25, + "number_samples_promised": 28, + "study_alias": "TDST", + "study_description": "Test delete sample template", + "study_abstract": "Test delete sample template", + "principal_investigator_id": StudyPerson(1)} + study = Study.create(User('test@foo.bar'), + "Delete Sample Template test", info) + metadata = pd.DataFrame.from_dict( + {'Sample1': {'physical_specimen_location': 'location1', + 'physical_specimen_remaining': 'true', + 'dna_extracted': 'true', + 'sample_type': 'type1', + 'collection_timestamp': '2014-05-29 12:24:15', + 'host_subject_id': 'NotIdentified', + 'Description': 'Test Sample 1', + 'latitude': '42.42', + 'longitude': '41.41', + 'taxon_id': '9606', + 'scientific_name': 'homo sapiens'}}, + orient='index', dtype=str) + SampleTemplate.create(metadata, study) + + job = self._create_job('delete_sample_template', {'study': study.id}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertFalse(SampleTemplate.exists(study.id)) + + def test_update_prep_template(self): + fd, fp = 
mkstemp(suffix=".txt") + close(fd) + with open(fp, 'w') as f: + f.write("sample_name\tnew_col\n1.SKD6.640190\tnew_value") + job = self._create_job('update_prep_template', {'prep_template': 1, + 'template_fp': fp}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertEqual(PrepTemplate(1)['1.SKD6.640190']['new_col'], + 'new_value') + obs = r_client.get("prep_template_1") + self.assertIsNotNone(obs) + obs = loads(obs) + self.assertItemsEqual(obs, ['job_id', 'alert_type', 'alert_msg']) + self.assertEqual(obs['job_id'], job.id) + self.assertEqual(obs['alert_type'], 'warning') + self.assertIn('The following columns have been added to the existing ' + 'template: new_col', obs['alert_msg']) + + def test_delete_sample_or_column(self): + st = SampleTemplate(1) + + # Delete a sample template column + job = self._create_job('delete_sample_or_column', + {'obj_class': 'SampleTemplate', 'obj_id': 1, + 'sample_or_col': 'columns', + 'name': 'season_environment'}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertNotIn('season_environment', st.categories()) + + # Delete a sample template sample - need to add one + # sample that we will remove + npt.assert_warns( + QiitaDBWarning, st.extend, + pd.DataFrame.from_dict({'Sample1': {'taxon_id': '9606'}}, + orient='index', dtype=str)) + self.assertIn('1.Sample1', st.keys()) + job = self._create_job('delete_sample_or_column', + {'obj_class': 'SampleTemplate', 'obj_id': 1, + 'sample_or_col': 'samples', + 'name': '1.Sample1'}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertNotIn('1.Sample1', st.keys()) + + # Delete a prep template column + pt = PrepTemplate(1) + job = self._create_job('delete_sample_or_column', + {'obj_class': 'PrepTemplate', 'obj_id': 1, + 'sample_or_col': 'columns', + 'name': 'target_subfragment'}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertNotIn('target_subfragment', pt.categories()) + + # Delete a prep 
template sample + metadata = pd.DataFrame.from_dict( + {'1.SKB8.640193': {'barcode': 'GTCCGCAAGTTA', + 'primer': 'GTGCCAGCMGCCGCGGTAA'}, + '1.SKD8.640184': {'barcode': 'CGTAGAGCTCTC', + 'primer': 'GTGCCAGCMGCCGCGGTAA'}}, + orient='index', dtype=str) + pt = npt.assert_warns(QiitaDBWarning, PrepTemplate.create, metadata, + Study(1), "16S") + job = self._create_job('delete_sample_or_column', + {'obj_class': 'PrepTemplate', 'obj_id': pt.id, + 'sample_or_col': 'samples', + 'name': '1.SKD8.640184'}) + private_task(job.id) + self.assertNotIn('1.SKD8.640184', pt.keys()) + + # Test exceptions + job = self._create_job('delete_sample_or_column', + {'obj_class': 'UnknownClass', 'obj_id': 1, + 'sample_or_col': 'columns', 'name': 'column'}) + private_task(job.id) + self.assertEqual(job.status, 'error') + self.assertIn('Unknown value "UnknownClass". Choose between ' + '"SampleTemplate" and "PrepTemplate"', job.log.msg) + + job = self._create_job('delete_sample_or_column', + {'obj_class': 'SampleTemplate', 'obj_id': 1, + 'sample_or_col': 'unknown', 'name': 'column'}) + private_task(job.id) + self.assertEqual(job.status, 'error') + self.assertIn('Unknown value "unknown". 
Choose between "samples" ' + 'and "columns"', job.log.msg) + + # This is a long test but it includes the 3 important cases that need + # to be tested on this function (job success, job error, and internal error + # when completing the job) + def test_complete_job(self): + # Complete success + pt = npt.assert_warns( + QiitaDBWarning, PrepTemplate.create, + pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), + Study(1), '16S') + c_job = ProcessingJob.create( + User('test@foo.bar'), + Parameters.load( + Command.get_validator('BIOM'), + values_dict={'template': pt.id, + 'files': dumps({'BIOM': ['file']}), + 'artifact_type': 'BIOM'})) + c_job._set_status('running') + fd, fp = mkstemp(suffix='_table.biom') + close(fd) + with open(fp, 'w') as f: + f.write('\n') + self._clean_up_files.append(fp) + exp_artifact_count = get_count('qiita.artifact') + 1 + payload = dumps( + {'success': True, 'error': '', + 'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')], + 'artifact_type': 'BIOM'}}}) + job = self._create_job('complete_job', {'job_id': c_job.id, + 'payload': payload}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertEqual(c_job.status, 'success') + self.assertEqual(get_count('qiita.artifact'), exp_artifact_count) + + # Complete job error + payload = dumps({'success': False, 'error': 'Job failure'}) + job = self._create_job( + 'complete_job', {'job_id': 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d', + 'payload': payload}) + private_task(job.id) + self.assertEqual(job.status, 'success') + c_job = ProcessingJob('bcc7ebcd-39c1-43e4-af2d-822e3589f14d') + self.assertEqual(c_job.status, 'error') + self.assertEqual(c_job.log, LogEntry.newest_records(numrecords=1)[0]) + self.assertEqual(c_job.log.msg, 'Job failure') + + # Complete internal error + pt = npt.assert_warns( + QiitaDBWarning, PrepTemplate.create, + pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), + Study(1), '16S') + c_job = ProcessingJob.create( + User('test@foo.bar'), + Parameters.load( + 
Command.get_validator('BIOM'), + values_dict={'template': pt.id, + 'files': dumps({'BIOM': ['file']}), + 'artifact_type': 'BIOM'})) + c_job._set_status('running') + fp = '/surprised/if/this/path/exists.biom' + payload = dumps( + {'success': True, 'error': '', + 'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')], + 'artifact_type': 'BIOM'}}}) + job = self._create_job('complete_job', {'job_id': c_job.id, + 'payload': payload}) + private_task(job.id) + self.assertEqual(job.status, 'success') + self.assertEqual(c_job.status, 'error') + self.assertIn('No such file or directory', c_job.log.msg) + if __name__ == '__main__': main() diff --git a/scripts/qiita b/scripts/qiita index 68b668c05..ba8911bfd 100755 --- a/scripts/qiita +++ b/scripts/qiita @@ -27,7 +27,6 @@ import qiita_db as qdb from qiita_core.qiita_settings import qiita_config, r_client from qiita_ware.ebi import EBISubmission from qiita_ware.commands import submit_EBI as _submit_EBI -from qiita_ware.context import system_call, ComputeError try: @@ -63,8 +62,8 @@ def webserver(build_docs): cmd = 'make -C %s html' % sphinx_fp print('Building documentation ...') try: - system_call(cmd) - except ComputeError as e: + qdb.processing_job._system_call(cmd) + except Exception as e: raise click.ClickException('Could not build documentation: %s' % str(e)) else: diff --git a/scripts/qiita-private b/scripts/qiita-private deleted file mode 100755 index 40c6bf414..000000000 --- a/scripts/qiita-private +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -# ----------------------------------------------------------------------------- -# Copyright (c) 2014--, The Qiita Development Team. -# -# Distributed under the terms of the BSD 3-clause License. -# -# The full license is in the file LICENSE, distributed with this software. 
-# ----------------------------------------------------------------------------- - -import click -import qiita_db as qdb - - -@click.group() -def qiita_private(): - pass - - -@qiita_private.command() -@click.argument('job_id', required=True, type=click.STRING) -@click.argument('payload', required=True, type=click.STRING) -def complete_job(job_id, payload): - qdb.commands.complete_job_cmd(job_id, payload) - - -if __name__ == '__main__': - qiita_private()