Skip to content
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ install:
- pip install https://github.com/qiita-spots/qiita_client/archive/master.zip
- pip install https://github.com/qiita-spots/qtp-biom/archive/master.zip
- export QIITA_SERVER_CERT=`pwd`/qiita_core/support_files/server.crt
- configure_biom --env-script "source ~/virtualenv/python2.7/bin/activate; export PATH=$HOME/miniconda3/bin/:$PATH; . activate qtp-biom" --server-cert $QIITA_SERVER_CERT
- configure_biom --env-script "export PATH=$HOME/miniconda3/bin/:$PATH; source activate qtp-biom" --server-cert $QIITA_SERVER_CERT
- source deactivate
- source activate qiita
before_script:
Expand Down
68 changes: 67 additions & 1 deletion qiita_db/handlers/artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@

from tornado.web import HTTPError
from collections import defaultdict
from json import loads
from json import loads, dumps

from qiita_core.qiita_settings import r_client
import qiita_db as qdb
from .oauth2 import OauthBaseHandler, authenticate_oauth

Expand Down Expand Up @@ -234,3 +235,68 @@ def post(self):
self.set_status(200, reason="Artifact type already exists")

self.finish()


class APIArtifactHandler(OauthBaseHandler):
    """REST endpoint to create a new artifact, either as the direct output
    of an existing processing job or attached to a prep template."""

    @authenticate_oauth
    def post(self):
        """Create a new artifact and submit a validate job for it.

        Request arguments
        -----------------
        user_email : str
            Email of the user creating the artifact
        job_id : str, optional
            Id of the existing processing job the artifact belongs to;
            mutually exclusive with prep_id
        prep_id : str, optional
            Id of the prep template to attach the artifact to;
            mutually exclusive with job_id
        artifact_type : str
            The type of the new artifact (e.g. 'BIOM')
        command_artifact_name : str, optional
            The command output name the artifact maps to; defaults to 'Name'
        files : str
            JSON-encoded dict of {filepath_type: [filepaths]}

        Raises
        ------
        HTTPError
            400 if neither or both of job_id/prep_id are given, or if
            command_artifact_name is not an output of the job's command
        """
        user_email = self.get_argument('user_email')
        job_id = self.get_argument('job_id', None)
        prep_id = self.get_argument('prep_id', None)
        atype = self.get_argument('artifact_type')
        aname = self.get_argument('command_artifact_name', 'Name')
        files = self.get_argument('files')

        # exactly one of job_id / prep_id must be supplied
        if job_id is None and prep_id is None:
            raise HTTPError(
                400, reason='You need to specify a job_id or a prep_id')
        if job_id is not None and prep_id is not None:
            raise HTTPError(
                400, reason='You need to specify only a job_id or a prep_id')

        user = qdb.user.User(user_email)
        values = {
            'files': files, 'artifact_type': atype, 'name': aname,
            # leaving here in case we need to add a way to add an artifact
            # directly to an analysis, for more information see
            # ProcessingJob._complete_artifact_transformation
            'analysis': None}
        PJ = qdb.processing_job.ProcessingJob
        if job_id is not None:
            TN = qdb.sql_connection.TRN
            job = PJ(job_id)
            # map the requested output name to the command_output row of
            # the job's command
            with TN:
                sql = """SELECT command_output_id
                         FROM qiita.command_output
                         WHERE name = %s AND command_id = %s"""
                TN.add(sql, [aname, job.command.id])
                results = TN.execute_fetchflatten()
            if len(results) < 1:
                # pass the text as reason= (not the positional log_message)
                # so it is returned to the client, consistent with the
                # errors raised above
                raise HTTPError(400, reason='The command_artifact_name '
                                'does not exist in the command')
            cmd_out_id = results[0]
            provenance = {'job': job_id,
                          'cmd_out_id': cmd_out_id,
                          # direct_creation is a flag to avoid having to wait
                          # for the complete job to create the new artifact,
                          # which is normally run during regular processing.
                          # Skipping is fine because we are adding an artifact
                          # to an existing job outside of regular processing
                          'direct_creation': True,
                          'name': aname}
            values['provenance'] = dumps(provenance)
            # NOTE(review): this is the input artifact's id, reused below as
            # the prep_template redis key and validate template -- confirm
            # the artifact id matches the prep id in this flow
            prep_id = job.input_artifacts[0].id
        else:
            prep_id = int(prep_id)

        values['template'] = prep_id
        # create and submit the validate job that will register the artifact
        cmd = qdb.software.Command.get_validator(atype)
        params = qdb.software.Parameters.load(cmd, values_dict=values)
        new_job = PJ.create(user, params, True)
        new_job.submit()

        # let the GUI know there is a job running on this prep
        r_client.set('prep_template_%d' % prep_id,
                     dumps({'job_id': new_job.id, 'is_qiita_job': True}))

        self.write(new_job.id)
        self.finish()
99 changes: 97 additions & 2 deletions qiita_db/handlers/tests/test_artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,13 @@
# -----------------------------------------------------------------------------

from unittest import main, TestCase
from json import loads
from json import loads, dumps
from functools import partial
from os.path import join, exists, isfile
from os import close, remove
from shutil import rmtree
from tempfile import mkstemp, mkdtemp
from json import dumps
from time import sleep

from tornado.web import HTTPError
import pandas as pd
Expand Down Expand Up @@ -288,5 +288,100 @@ def test_post(self):
self.assertEqual(obs.code, 200)


class APIArtifactHandlerTests(OauthTestingBase):
    """End-to-end tests for the POST /qiita_db/artifact/ endpoint."""

    def setUp(self):
        super(APIArtifactHandlerTests, self).setUp()
        # temporary files created by the test; removed in tearDown
        self._clean_up_files = []

    def tearDown(self):
        super(APIArtifactHandlerTests, self).tearDown()

        for f in self._clean_up_files:
            if exists(f):
                remove(f)

    def test_post(self):
        # no header: the oauth layer should reject the request
        obs = self.post('/qiita_db/artifact/', data={})
        self.assertEqual(obs.code, 400)

        # write a BIOM table to a temporary file to use as the new
        # artifact's payload
        fd, fp = mkstemp(suffix='_table.biom')
        close(fd)
        # renaming samples
        # (et is presumably a module-level biom Table fixture; its sample
        # ids are renamed to match study 1's samples -- confirm in imports)
        et.update_ids({'S1': '1.SKB1.640202',
                       'S2': '1.SKD3.640198',
                       'S3': '1.SKM4.640180'}, inplace=True)
        with biom_open(fp, 'w') as f:
            et.to_hdf5(f, "test")
        self._clean_up_files.append(fp)

        # no job_id or prep_id
        data = {'user_email': '[email protected]',
                'artifact_type': 'BIOM',
                'command_artifact_name': 'OTU table',
                'files': dumps({'biom': [fp]})}

        obs = self.post('/qiita_db/artifact/', headers=self.header, data=data)
        self.assertEqual(obs.code, 400)
        self.assertIn(
            'You need to specify a job_id or a prep_id', str(obs.error))

        # both job_id and prep_id defined
        data['job_id'] = 'e5609746-a985-41a1-babf-6b3ebe9eb5a9'
        data['prep_id'] = 'prep_id'
        obs = self.post('/qiita_db/artifact/', headers=self.header, data=data)
        self.assertEqual(obs.code, 400)
        self.assertIn(
            'You need to specify only a job_id or a prep_id', str(obs.error))

        # make sure that all the plugins are on
        qdb.util.activate_or_update_plugins(update=True)

        # tests success by inserting a new artifact into an existing job
        original_job = qdb.processing_job.ProcessingJob(data['job_id'])
        input_artifact = original_job.input_artifacts[0]
        self.assertEqual(len(input_artifact.children), 3)
        # send the new data
        del data['prep_id']
        obs = self.post('/qiita_db/artifact/', headers=self.header, data=data)
        jid = obs.body.decode("utf-8")

        # the endpoint returns the id of the validate job it created;
        # block until that job finishes before inspecting the results
        job = qdb.processing_job.ProcessingJob(jid)
        while job.status not in ('error', 'success'):
            sleep(0.5)

        # now the original job should have 4 children and make sure they have
        # the same parent and parameters
        children = input_artifact.children
        self.assertEqual(len(children), 4)
        for c in children[1:]:
            self.assertCountEqual(children[0].processing_parameters.values,
                                  c.processing_parameters.values)
            self.assertEqual(children[0].parents, c.parents)

        # now let's test adding an artifact directly to a new prep
        new_prep = qdb.metadata_template.prep_template.PrepTemplate.create(
            pd.DataFrame({'new_col': {'1.SKB1.640202': 1,
                                      '1.SKD3.640198': 2,
                                      '1.SKM4.640180': 3}}),
            qdb.study.Study(1), '16S')
        fd, fp = mkstemp(suffix='_table.biom')
        close(fd)
        with biom_open(fp, 'w') as f:
            et.to_hdf5(f, "test")
        self._clean_up_files.append(fp)
        data = {'user_email': '[email protected]',
                'artifact_type': 'BIOM', 'prep_id': new_prep.id,
                'files': dumps({'biom': [fp]})}

        obs = self.post('/qiita_db/artifact/', headers=self.header, data=data)
        jid = obs.body.decode("utf-8")

        # wait for the new validate job; once it is done the prep should
        # have an artifact attached to it
        job = qdb.processing_job.ProcessingJob(jid)
        while job.status not in ('error', 'success'):
            sleep(0.5)
        self.assertIsNotNone(new_prep.artifact)


# Run the test suite when this file is executed directly
if __name__ == '__main__':
    main()
44 changes: 32 additions & 12 deletions qiita_db/processing_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -1115,6 +1115,14 @@ def _complete_artifact_definition(self, artifact_data):
Dict with the artifact information. `filepaths` contains the list
of filepaths and filepath types for the artifact and
`artifact_type` the type of the artifact

Notes
-----
The `provenance` in the job.parameters can contain a `direct_creation`
flag to avoid having to wait for the complete job to create a new
artifact, which is normally ran during regular processing. Skipping is
fine because we are adding an artifact to an existing job outside of
regular processing
"""
with qdb.sql_connection.TRN:
atype = artifact_data['artifact_type']
Expand All @@ -1125,18 +1133,30 @@ def _complete_artifact_definition(self, artifact_data):
if job_params['provenance'] is not None:
# The artifact is a result from a previous job
provenance = loads(job_params['provenance'])
if provenance.get('data_type') is not None:
artifact_data = {'data_type': provenance['data_type'],
'artifact_data': artifact_data}

sql = """UPDATE qiita.processing_job_validator
SET artifact_info = %s
WHERE validator_id = %s"""
qdb.sql_connection.TRN.add(
sql, [dumps(artifact_data), self.id])
qdb.sql_connection.TRN.execute()
# Can't create the artifact until all validators are completed
self._set_status('waiting')
if provenance.get('direct_creation', False):
original_job = ProcessingJob(provenance['job'])
qdb.artifact.Artifact.create(
filepaths, atype,
parents=original_job.input_artifacts,
processing_parameters=original_job.parameters,
analysis=job_params['analysis'],
name=job_params['name'])
self._set_status('success')
else:
if provenance.get('data_type') is not None:
artifact_data = {'data_type': provenance['data_type'],
'artifact_data': artifact_data}

sql = """UPDATE qiita.processing_job_validator
SET artifact_info = %s
WHERE validator_id = %s"""
qdb.sql_connection.TRN.add(
sql, [dumps(artifact_data), self.id])
qdb.sql_connection.TRN.execute()

# Can't create the artifact until all validators
# are completed
self._set_status('waiting')
else:
# The artifact is uploaded by the user or is the initial
# artifact of an analysis
Expand Down
4 changes: 3 additions & 1 deletion qiita_pet/webserver.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,8 @@
JobHandler, HeartbeatHandler, ActiveStepHandler, CompleteHandler,
ProcessingJobAPItestHandler)
from qiita_db.handlers.artifact import (
ArtifactHandler, ArtifactAPItestHandler, ArtifactTypeHandler)
ArtifactHandler, ArtifactAPItestHandler, ArtifactTypeHandler,
APIArtifactHandler)
from qiita_db.handlers.sample_information import SampleInfoDBHandler
from qiita_db.handlers.user import UserInfoDBHandler, UsersListDBHandler
from qiita_db.handlers.prep_template import (
Expand Down Expand Up @@ -205,6 +206,7 @@ def __init__(self):
(r"/qiita_db/jobs/(.*)", JobHandler),
(r"/qiita_db/artifacts/types/", ArtifactTypeHandler),
(r"/qiita_db/artifacts/(.*)/", ArtifactHandler),
(r"/qiita_db/artifact/", APIArtifactHandler),
(r"/qiita_db/users/", UsersListDBHandler),
(r"/qiita_db/user/(.*)/data/", UserInfoDBHandler),
(r"/qiita_db/sample_information/(.*)/data/", SampleInfoDBHandler),
Expand Down