Skip to content
Snippets Groups Projects
Commit 916bfc17 authored by Ambrus Simon's avatar Ambrus Simon
Browse files

Include analysis inputs in batch downloads

parent 2f939d70
No related branches found
No related tags found
No related merge requests found
......@@ -33,7 +33,9 @@ def _filter_check(property_filter, property_values):
class Download(base.RequestHandler):
def _append_targets(self, targets, cont_name, container, prefix, total_size, total_cnt, data_path, filters):
for f in container.get('files', []):
inputs = [('input', f) for f in container.get('inputs', [])]
outputs = [('output', f) for f in container.get('files', [])]
for file_group, f in inputs + outputs:
if filters:
filtered = True
for filter_ in filters:
......@@ -49,10 +51,9 @@ class Download(base.RequestHandler):
filepath = os.path.join(data_path, util.path_from_hash(f['hash']))
if os.path.exists(filepath): # silently skip missing files
if cont_name == 'analyses':
# TODO implement analysis input downloads via generic download endpoint, too
targets.append((filepath, prefix + '/output/' + f['name'], cont_name, str(container.get('_id')),f['size']))
targets.append((filepath, '/'.join([prefix, file_group, f['name']]), cont_name, str(container.get('_id')), f['size']))
else:
targets.append((filepath, prefix + '/' + f['name'], cont_name, str(container.get('_id')),f['size']))
targets.append((filepath, prefix + '/' + f['name'], cont_name, str(container.get('_id')), f['size']))
total_size += f['size']
total_cnt += 1
else:
......@@ -214,7 +215,7 @@ class Download(base.RequestHandler):
total_size, file_cnt = self._append_targets(targets, 'acquisitions', acq, prefix, total_size, file_cnt, data_path, req_spec.get('filters'))
elif item['level'] == 'analysis':
analysis = config.db.analyses.find_one(base_query, ['parent', 'label', 'files', 'uid', 'timestamp'])
analysis = config.db.analyses.find_one(base_query, ['parent', 'label', 'inputs', 'files', 'uid', 'timestamp'])
if not analysis:
# silently(while logging it) skip missing objects/objects user does not have access to
log.warn("Expected anaylysis {} to exist but it is missing. Node will be skipped".format(item_id))
......
......@@ -84,6 +84,29 @@ def test_legacy_analysis(data_builder, as_admin, file_form, api_db):
api_db.analyses.delete_one({'_id': bson.ObjectId(analysis)})
def test_analysis_download(data_builder, as_admin, file_form, api_db):
    """Batch download of an analysis should include its inputs as well as its outputs."""
    session = data_builder.create_session()

    # Create an analysis on the session with one input and one output file.
    analysis_meta = {
        'label': 'legacy',
        'inputs': [{'name': 'input.csv', 'info': {'foo': 'foo'}}],
        'outputs': [{'name': 'output.csv', 'info': {'bar': 'bar'}}],
    }
    create_resp = as_admin.post('/sessions/' + session + '/analyses',
                                files=file_form('input.csv', 'output.csv', meta=analysis_meta))
    assert create_resp.ok
    analysis = create_resp.json()['_id']

    # Get download ticket for analysis via /download
    ticket_resp = as_admin.get('/download', params={'ticket': ''},
                               json={'optional': True, 'nodes': [{'level': 'analysis', '_id': analysis}]})
    assert ticket_resp.ok
    ticket = ticket_resp.json()['ticket']

    # Verify both inputs and outputs are present
    download_resp = as_admin.get('/download', params={'ticket': ticket})
    assert download_resp.ok
    with tarfile.open(mode='r', fileobj=cStringIO.StringIO(download_resp.content)) as tar:
        member_names = {member.name for member in tar.getmembers()}
        assert member_names == {'legacy/input/input.csv', 'legacy/output/output.csv'}
def check_files(as_admin, analysis_id, filegroup, *filenames):
# Verify that filegroup has all files, inflated
r = as_admin.get('/analyses/' + analysis_id)
......
......@@ -451,9 +451,9 @@ def test_analysis_download(data_builder, file_form, as_admin, as_drone, default_
r = as_admin.get('/download', params={'ticket': ticket})
assert r.ok
# Check to make sure outputs are in tar
# Check to make sure inputs and outputs are in tar
with tarfile.open(mode='r', fileobj=cStringIO.StringIO(r.content)) as tar:
assert [m.name for m in tar.getmembers()] == ['test/output/two.zip']
assert set([m.name for m in tar.getmembers()]) == set(['test/input/one.csv', 'test/output/two.zip'])
# try to get download ticket for non-existent analysis file
r = as_admin.get(analysis_inputs + '/non-existent.csv')
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment