Commit fc86fa33 authored by Harsha Kethineni

Request body is now list of nodes

parent 7361cb94
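In short, /download/summary now accepts a JSON array of nodes instead of a single node object, and the level names become singular. An illustrative before/after payload, with placeholder ObjectId values (mixed levels in one request are allowed by the new handler):

    # Before: one container per request, plural level name
    {"level": "projects", "_id": "5a1b2c3d4e5f6a7b8c9d0e1f"}

    # After: a list of nodes, singular level names
    [
        {"level": "project", "_id": "5a1b2c3d4e5f6a7b8c9d0e1f"},
        {"level": "acquisition", "_id": "5a1b2c3d4e5f6a7b8c9d0e20"}
    ]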
@@ -318,42 +318,62 @@ class Download(base.RequestHandler):
     def summary(self):
         """Return a summary of what has been/will be downloaded based on a given query"""
-        res = {}
         req = self.request.json_body
-        req['_id'] = bson.ObjectId(req['_id'])
-        level = req['level']
-
-        containers = ['projects', 'sessions', 'acquisitions']
-        cont_query = {}
-        if level == 'projects':
-            # Grab sessions and their ids
-            sessions = config.db.sessions.find({'project': req['_id']}, {'_id': 1})
-            session_ids = [s['_id'] for s in sessions]
-
-            # for each type of container below it will have a slightly modified match query
-            cont_query = {
-                'projects': {'_id': req['_id']},
-                'sessions': {'project': req['_id']},
-                'acquisitions': {'session': {'$in': session_ids}}
-            }
-        elif level == 'sessions':
-
-            # for each type of container below it will have a slightly modified match query
-            cont_query = {
-                'sessions': {'_id': req['_id']},
-                'acquisitions': {'session': req['_id']}
-            }
-            containers = containers[1:]
-        elif level == 'acquisitions':
-            cont_query['acquisitions'] = {'_id': req['_id']}
-            containers = containers[-1:]
-        elif level == 'analyses':
-            cont_query['analyses'] = {'_id': req['_id']}
-            containers = ['analyses']
-        else:
-            self.abort(400, "{} not a recognized level".format(level))
-
+        cont_query = {
+            'projects': {'_id': {'$in':[]}},
+            'sessions': {'_id': {'$in':[]}},
+            'acquisitions': {'_id': {'$in':[]}},
+            'analyses' : {'_id': {'$in':[]}}
+        }
+        for node in req:
+            node['_id'] = bson.ObjectId(node['_id'])
+            level = node['level']
+
+            containers = {'projects':0, 'sessions':0, 'acquisitions':0, 'analyses':0}
+
+            if level == 'project':
+                # Grab sessions and their ids
+                sessions = config.db.sessions.find({'project': node['_id']}, {'_id': 1})
+                session_ids = [s['_id'] for s in sessions]
+                acquisitions = config.db.acquisitions.find({'session': {'$in': session_ids}}, {'_id': 1})
+                acquisition_ids = [a['_id'] for a in acquisitions]
+
+                containers['projects']=1
+                containers['sessions']=1
+                containers['acquisitions']=1
+
+                # for each type of container below it will have a slightly modified match query
+                cont_query.get('projects',{}).get('_id',{}).get('$in').append(node['_id'])
+                cont_query['sessions']['_id']['$in'] = cont_query['sessions']['_id']['$in'] + session_ids
+                cont_query['acquisitions']['_id']['$in'] = cont_query['acquisitions']['_id']['$in'] + acquisition_ids
+
+            elif level == 'session':
+                acquisitions = config.db.acquisitions.find({'session': node['_id']}, {'_id': 1})
+                acquisition_ids = [a['_id'] for a in acquisitions]
+
+                # for each type of container below it will have a slightly modified match query
+                cont_query.get('sessions',{}).get('_id',{}).get('$in').append(node['_id'])
+                cont_query['acquisitions']['_id']['$in'] = cont_query['acquisitions']['_id']['$in'] + acquisition_ids
+                containers['sessions']=1
+                containers['acquisitions']=1
+
+            elif level == 'acquisition':
+                cont_query.get('acquisitions',{}).get('_id',{}).get('$in').append(node['_id'])
+                containers['acquisitions']=1
+
+            elif level == 'analysis':
+                cont_query.get('analyses',{}).get('_id',{}).get('$in').append(node['_id'])
+                containers['analyses'] = 1
+
+            else:
+                self.abort(400, "{} not a recognized level".format(level))
+
+        containers = [cont for cont in containers if containers[cont] == 1]
+
+        res = {}
         for cont_name in containers:
             # Aggregate file types
             pipeline = [
...
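For readers skimming the hunk above: the rewritten handler seeds one '$in' ID list per container type, then appends IDs to those lists while looping over the requested nodes. A minimal standalone sketch of that accumulation pattern (build_cont_query is a hypothetical helper name, not part of this codebase; the project and session branches, which also fan out to child containers via Mongo lookups, are omitted here):

    import bson

    def build_cont_query(nodes):
        """Accumulate per-container '$in' ID lists for acquisition/analysis nodes."""
        cont_query = {
            'projects':     {'_id': {'$in': []}},
            'sessions':     {'_id': {'$in': []}},
            'acquisitions': {'_id': {'$in': []}},
            'analyses':     {'_id': {'$in': []}},
        }
        used = set()
        for node in nodes:
            node_id = bson.ObjectId(node['_id'])
            level = node['level']
            if level == 'acquisition':
                cont_query['acquisitions']['_id']['$in'].append(node_id)
                used.add('acquisitions')
            elif level == 'analysis':
                cont_query['analyses']['_id']['$in'].append(node_id)
                used.add('analyses')
            else:
                raise ValueError("{} not a recognized level".format(level))
        # Only the container types that were actually referenced get queried
        return {cont: cont_query[cont] for cont in used}

    # Example: two nodes collapse into one query per container type
    print(build_cont_query([
        {'level': 'acquisition', '_id': '0' * 24},
        {'level': 'analysis', '_id': '1' * 24},
    ]))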
@@ -425,22 +425,22 @@ def test_summary(data_builder, as_admin, file_form):
     missing_object_id = '000000000000000000000000'

-    r = as_admin.post('/download/summary', json={"level":"projects", "_id":project})
+    r = as_admin.post('/download/summary', json=[{"level":"project", "_id":project}])
     assert r.ok
     assert len(r.json()) == 1
     assert r.json().get("csv", {}).get("count",0) == 4

-    r = as_admin.post('/download/summary', json={"level":"sessions", "_id":session})
+    r = as_admin.post('/download/summary', json=[{"level":"session", "_id":session}])
     assert r.ok
     assert len(r.json()) == 1
     assert r.json().get("csv", {}).get("count",0) == 2

-    r = as_admin.post('/download/summary', json={"level":"acquisitions", "_id":acquisition})
+    r = as_admin.post('/download/summary', json=[{"level":"acquisition", "_id":acquisition},{"level":"acquisition", "_id":acquisition2}])
     assert r.ok
     assert len(r.json()) == 1
-    assert r.json().get("csv", {}).get("count",0) == 1
+    assert r.json().get("csv", {}).get("count",0) == 2

-    r = as_admin.post('/download/summary', json={"level":"groups", "_id":missing_object_id})
+    r = as_admin.post('/download/summary', json=[{"level":"group", "_id":missing_object_id}])
     assert r.status_code == 400

     r = as_admin.post('/sessions/' + session + '/analyses', files=file_form(
@@ -448,7 +448,7 @@ def test_summary(data_builder, as_admin, file_form):
     assert r.ok
     analysis = r.json()['_id']

-    r = as_admin.post('/download/summary', json={"level":"analyses", "_id":analysis})
+    r = as_admin.post('/download/summary', json=[{"level":"analysis", "_id":analysis}])
     assert r.ok
     assert len(r.json()) == 1
     assert r.json().get("tabular data", {}).get("count",0) == 1
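For reference, a hedged client-side sketch of the new call shape. The base URL, the scitran-user authorization header format, and the ObjectId values are assumptions about a typical deployment, not taken from this commit:

    import requests

    # Hypothetical base URL and API key; adjust to your deployment.
    API = 'https://localhost:8443/api'
    HEADERS = {'Authorization': 'scitran-user <api-key>'}

    # One request can now summarize several containers, even at different levels.
    body = [
        {'level': 'project', '_id': '5a1b2c3d4e5f6a7b8c9d0e1f'},
        {'level': 'acquisition', '_id': '5a1b2c3d4e5f6a7b8c9d0e20'},
    ]
    r = requests.post(API + '/download/summary', json=body, headers=HEADERS, verify=False)
    r.raise_for_status()

    # The response maps file types to aggregate counts, e.g. {"csv": {"count": 4, ...}},
    # matching the assertions in the test above.
    print(r.json())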