Commit 3b463536 authored by Ambrus Simon

WIP start migrating AnalysisHandler

parent adc1f632
@@ -9,7 +9,8 @@ from .handlers.containerhandler import ContainerHandler
 from .handlers.dataexplorerhandler import DataExplorerHandler
 from .handlers.devicehandler import DeviceHandler
 from .handlers.grouphandler import GroupHandler
-from .handlers.listhandler import AnalysesHandler, ListHandler, FileListHandler, NotesListHandler, PermissionsListHandler, TagsListHandler
+from .handlers.listhandler import ListHandler, FileListHandler, NotesListHandler, PermissionsListHandler, TagsListHandler
+from .handlers.refererhandler import AnalysesHandler
 from .handlers.reporthandler import ReportHandler
 from .handlers.resolvehandler import ResolveHandler
 from .handlers.roothandler import RootHandler
@@ -232,16 +233,13 @@ endpoints = [
     # Containers
     route('/<cont_name:{cname}>', ContainerHandler, name='cont_list', h='get_all', m=['GET']),
     route('/<cont_name:{cname}>', ContainerHandler, m=['POST']),
     prefix('/<cont_name:{cname}>', [
         route('/<cid:{cid}>', ContainerHandler, m=['GET','PUT','DELETE']),
         prefix('/<cid:{cid}>', [
             route('/<list_name:tags>', TagsListHandler, m=['POST']),
             route('/<list_name:tags>/<value:{tag}>', TagsListHandler, m=['GET', 'PUT', 'DELETE']),

             route('/packfile-start', FileListHandler, h='packfile_start', m=['POST']),
             route('/packfile', FileListHandler, h='packfile', m=['POST']),
@@ -250,16 +248,17 @@ endpoints = [
             route('/<list_name:files>/<name:{fname}>', FileListHandler, m=['GET', 'DELETE']),
             route('/<list_name:files>/<name:{fname}>/info', FileListHandler, h='get_info', m=['GET']),
-            route('/<list_name:analyses>', AnalysesHandler, m=['POST']),
-            # Could be in a prefix. Had weird syntax highlighting issues so leaving for another day
-            route('/<list_name:analyses>/<_id:{cid}>', AnalysesHandler, m=['GET', 'DELETE']),
-            route('/<list_name:analyses>/<_id:{cid}>/files', AnalysesHandler, h='download', m=['GET']),
-            route('/<list_name:analyses>/<_id:{cid}>/files/<name:{fname}>', AnalysesHandler, h='download', m=['GET']),
-            route('/<list_name:analyses>/<_id:{cid}>/notes', AnalysesHandler, h='add_note', m=['POST']),
-            route('/<list_name:analyses>/<_id:{cid}>/notes/<note_id:{cid}>', AnalysesHandler, h='delete_note', m=['DELETE']),
+            route('/<child_name:analyses>', AnalysesHandler, m=['POST']),
+            prefix('/<child_name:analyses>', [
+                route('/<_id:{cid}>', AnalysesHandler, m=['GET', 'DELETE']),
+                route('/<_id:{cid}>/files', AnalysesHandler, h='download', m=['GET']),
+                route('/<_id:{cid}>/files/<name:{fname}>', AnalysesHandler, h='download', m=['GET']),
+                route('/<_id:{cid}>/notes', AnalysesHandler, h='add_note', m=['POST']),
+                route('/<_id:{cid}>/notes/<note_id:{cid}>', AnalysesHandler, h='delete_note', m=['DELETE']),
+            ]),
             route('/<list_name:notes>', NotesListHandler, m=['POST']),
             route('/<list_name:notes>/<_id:{nid}>', NotesListHandler, name='notes', m=['GET', 'PUT', 'DELETE']),
         ])
     ]),
......
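For orientation, the sketch below (a hypothetical illustration, not part of the diff) prints the analysis URL space these routes declare. The client-facing paths are unchanged by this commit; only the handler behind them moves from listhandler.AnalysesHandler to refererhandler.AnalysesHandler. The ANALYSIS_ROUTES name, the '/api' prefix and the placeholder IDs (borrowed from the docstring examples further down) are assumptions.

# Hypothetical illustration of the analysis URL space declared above.
ANALYSIS_ROUTES = [
    ('POST',   '/{cont_name}/{cid}/analyses'),                              # create (file upload, or ?job=true)
    ('GET',    '/{cont_name}/{cid}/analyses/{analysis_id}'),
    ('DELETE', '/{cont_name}/{cid}/analyses/{analysis_id}'),
    ('GET',    '/{cont_name}/{cid}/analyses/{analysis_id}/files'),          # ticketed tar of all files
    ('GET',    '/{cont_name}/{cid}/analyses/{analysis_id}/files/{fname}'),
    ('POST',   '/{cont_name}/{cid}/analyses/{analysis_id}/notes'),
    ('DELETE', '/{cont_name}/{cid}/analyses/{analysis_id}/notes/{note_id}'),
]

for method, template in ANALYSIS_ROUTES:
    path = template.format(cont_name='sessions', cid='57081d06b386a6dc79ca383c',
                           analysis_id='5751cd3781460100a66405c8',
                           fname='exampledicom.zip', note_id='<note_id>')
    print('{:6s} /api{}'.format(method, path))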
"""
This module defines the permission-checker decorators used by the RefererHandler classes.
"""
from . import _get_access, INTEGER_ROLES
def default_referer(handler, parent_container=None):
def g(exec_op):
def f(method, _id, query_params=None, payload=None, exclude_params=None):
access = _get_access(handler.uid, handler.user_site, parent_container)
if method == 'GET' and parent_container.get('public', False):
has_access = True
elif method == 'GET':
has_access = access >= INTEGER_ROLES['ro']
elif method in ['POST', 'PUT', 'DELETE']:
has_access = access >= INTEGER_ROLES['rw']
else:
has_access = False
if has_access:
return exec_op(method, _id, query_params, payload, exclude_params)
else:
handler.abort(403, 'user not authorized to perform a {} operation on parent container'.format(method))
return f
return g
def public_request(handler, parent_container=None):
def g(exec_op):
def f(method, _id=None, payload=None):
if method == 'GET' and parent_container.get('public', False):
return exec_op(method, _id, payload)
else:
handler.abort(403, 'not authorized to perform a {} operation on parent container'.format(method))
return f
return g
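A minimal, self-contained sketch of the calling convention these decorators implement (toy stand-ins only, not the real module): a permchecker is called with the storage operation it should guard and returns a wrapper with the same (method, _id, ...) signature that either forwards the call or aborts. default_referer above does exactly this, deriving has_access from _get_access() and INTEGER_ROLES on the parent container.

# Toy stand-ins illustrating the permchecker calling convention; the real decorator
# (default_referer above) additionally checks _get_access()/INTEGER_ROLES and calls
# handler.abort(403, ...) when the user may not touch the parent container.
def allow_all(handler, parent_container=None):
    def permchecker(exec_op):
        def wrapped(method, _id, query_params=None, payload=None, exclude_params=None):
            return exec_op(method, _id, query_params, payload, exclude_params)
        return wrapped
    return permchecker

def exec_op(method, _id, query_params=None, payload=None, exclude_params=None):
    # pretend storage operation
    return {'method': method, '_id': _id}

checker = allow_all(handler=None, parent_container={'public': True})
print(checker(exec_op)('POST', _id='5751cd3781460100a66405c8'))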
@@ -4,6 +4,14 @@ from .. import config
 from ..auth import INTEGER_ROLES

 CONT_TYPES = ['acquisition', 'analysis', 'collection', 'group', 'project', 'session']
+SINGULAR_TO_PLURAL = {
+    'group': 'groups',
+    'project': 'projects',
+    'session': 'sessions',
+    'acquisition': 'acquisitions',
+    'analysis': 'analyses',
+}
+PLURAL_TO_SINGULAR = {p: s for s, p in SINGULAR_TO_PLURAL.iteritems()}

 def get_perm(name):
@@ -128,6 +136,13 @@ class ContainerReference(object):
         result = config.db[self.collection].find_one({'_id': bson.ObjectId(self.id)})
         if result is None:
             raise Exception('No such {} {} in database'.format(self.type, self.id))
+        if 'parent' in result:
+            parent_collection = SINGULAR_TO_PLURAL[result['parent']['type']]
+            parent = config.db[parent_collection].find_one({'_id': bson.ObjectId(result['parent']['id'])})
+            if parent is None:
+                raise Exception('Cannot find parent {} {} of {} {}'.format(
+                    result['parent']['type'], result['parent']['id'], self.type, self.id))
+            result['permissions'] = parent['permissions']
         return result

     def find_file(self, filename):
@@ -138,10 +153,10 @@ class ContainerReference(object):
         return None

     def file_uri(self, filename):
-        if self.type == 'analysis':
-            analysis = self.get()
-            par_coll, par_id = singular_to_plural[analysis['parent']['type']], analysis['parent']['id']
-            return '/{}/{}/analyses/{}/files/{}'.format(par_coll, par_id, self.id, filename)
+        cont = self.get()
+        if 'parent' in cont:
+            par_coll, par_id = SINGULAR_TO_PLURAL[cont['parent']['type']], cont['parent']['id']
+            return '/{}/{}/{}/{}/files/{}'.format(par_coll, par_id, self.collection, self.id, filename)
         return '/{}/{}/files/{}'.format(self.collection, self.id, filename)

     def check_access(self, uid, perm_name):
@@ -179,13 +194,16 @@ def create_containerreference_from_filereference(fr):
     return ContainerReference.from_filereference(fr)

-singular_to_plural = {
-    'group': 'groups',
-    'project': 'projects',
-    'session': 'sessions',
-    'acquisition': 'acquisitions',
-    'analysis': 'analyses',
-    'file': 'files',
-}
-plural_to_singular = {p: s for s, p in singular_to_plural.iteritems()}
+def pluralize(cont_name):
+    if cont_name in SINGULAR_TO_PLURAL:
+        return SINGULAR_TO_PLURAL[cont_name]
+    elif cont_name in PLURAL_TO_SINGULAR:
+        return cont_name
+    raise Exception('Could not pluralize unknown container name {}'.format(cont_name))
+
+def singularize(cont_name):
+    if cont_name in PLURAL_TO_SINGULAR:
+        return PLURAL_TO_SINGULAR[cont_name]
+    elif cont_name in SINGULAR_TO_PLURAL:
+        return cont_name
+    raise Exception('Could not singularize unknown container name {}'.format(cont_name))
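Expected behaviour of the new helpers, written as a hypothetical interactive session; the import path is an assumption (adjust to wherever containerutil actually lives in this tree):

>>> from api.dao.containerutil import pluralize, singularize   # assumed import path
>>> pluralize('session')
'sessions'
>>> pluralize('sessions')        # already plural: returned as-is
'sessions'
>>> singularize('analyses')
'analysis'
>>> singularize('files')         # 'file' was dropped from the new mapping, so this raises
Traceback (most recent call last):
    ...
Exception: Could not singularize unknown container name files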
@@ -59,13 +59,6 @@ def initialize_list_configurations():
             'storage_schema_file': 'note.json',
             'input_schema_file': 'note.json'
         },
-        'analyses': {
-            'storage': liststorage.AnalysesStorage,
-            'permchecker': listauth.default_sublist,
-            'use_object_id': True,
-            'storage_schema_file': 'analysis.json',
-            'input_schema_file': 'analysis.json'
-        }
     }
     list_container_configurations = {
         'groups': {
@@ -619,320 +612,3 @@ class FileListHandler(ListHandler):
        metadata = json.loads(self.request.GET.get('metadata'))
        return upload.process_upload(self.request, upload.Strategy.packfile, origin=self.origin, context={'token': token_id}, response=self.response, metadata=metadata)
class AnalysesHandler(ListHandler):
def _check_ticket(self, ticket_id, _id, filename):
ticket = config.db.downloads.find_one({'_id': ticket_id})
if not ticket:
self.abort(404, 'no such ticket')
if ticket['ip'] != self.request.client_addr:
self.abort(400, 'ticket not for this source IP')
if not filename:
return self._check_ticket_for_batch(ticket)
if ticket.get('filename') != filename or ticket['target'] != _id:
self.abort(400, 'ticket not for this resource')
return ticket
def _check_ticket_for_batch(self, ticket):
if ticket.get('type') != 'batch':
self.abort(400, 'ticket not for this resource')
return ticket
def post(self, cont_name, list_name, **kwargs):
"""
Default behavior:
Creates an analysis object and uploads supplied input
and output files.
When param ``job`` is true:
Creates an analysis object and job object that reference
each other via ``job`` and ``destination`` fields. Job based
analyses are only allowed at the session level.
"""
_id = kwargs.pop('cid')
permchecker, storage, _, payload_validator, _ = self._initialize_request(cont_name, list_name, _id)
permchecker(noop)('POST', _id=_id)
if self.is_true('job'):
if cont_name == 'sessions':
payload = self.request.json_body
payload_validator(payload.get('analysis',{}), 'POST')
analysis = payload.get('analysis')
job = payload.get('job')
if job is None or analysis is None:
self.abort(400, 'JSON body must contain map for "analysis" and "job"')
result = storage.create_job_and_analysis(cont_name, _id, analysis, job, self.origin)
return {'_id': result['analysis']['_id']}
else:
self.abort(400, 'Analysis created via a job must be at the session level')
payload = upload.process_upload(self.request, upload.Strategy.analysis, origin=self.origin)
analysis = storage.default_analysis(self.origin)
analysis.update(payload)
result = storage.exec_op('POST', _id=_id, payload=analysis)
if result.modified_count == 1:
return {'_id': analysis['_id']}
else:
self.abort(500, 'Element not added in list analyses of container {} {}'.format(cont_name, _id))
def download(self, cont_name, list_name, **kwargs):
"""
.. http:get:: /api/(cont_name)/(cid)/analyses/(analysis_id)/files/(file_name)
Download a file from an analysis or download a tar of all files
When no filename is provided, a tar of all input and output files is created.
The first request to this endpoint without a ticket ID generates a download ticket.
A request to this endpoint with a ticket ID downloads the file(s).
If the analysis object is tied to a job, the input file(s) are inflated from
the job's ``input`` array.
:param cont_name: one of ``projects``, ``sessions``, ``collections``
:type cont_name: string
:param cid: Container ID
:type cid: string
:param analysis_id: Analysis ID
:type analysis_id: string
:param filename: (Optional) Filename of specific file to download
:type filename: string
:query string ticket: Download ticket ID
:statuscode 200: no error
:statuscode 404: No files on analysis ``analysis_id``
:statuscode 404: Could not find file ``filename`` on analysis ``analysis_id``
**Example request without ticket ID**:
.. sourcecode:: http
GET /api/sessions/57081d06b386a6dc79ca383c/analyses/5751cd3781460100a66405c8/files HTTP/1.1
Host: demo.flywheel.io
Accept: */*
**Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept-Encoding
Content-Type: application/json; charset=utf-8
{
"ticket": "57f2af23-a94c-426d-8521-11b2e8782020",
"filename": "analysis_5751cd3781460100a66405c8.tar",
"file_cnt": 3,
"size": 4525137
}
**Example request with ticket ID**:
.. sourcecode:: http
GET /api/sessions/57081d06b386a6dc79ca383c/analyses/5751cd3781460100a66405c8/files?ticket=57f2af23-a94c-426d-8521-11b2e8782020 HTTP/1.1
Host: demo.flywheel.io
Accept: */*
**Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept-Encoding
Content-Type: application/octet-stream
Content-Disposition: attachment; filename=analysis_5751cd3781460100a66405c8.tar;
**Example Request with filename**:
.. sourcecode:: http
GET /api/sessions/57081d06b386a6dc79ca383c/analyses/5751cd3781460100a66405c8/files/exampledicom.zip?ticket= HTTP/1.1
Host: demo.flywheel.io
Accept: */*
**Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept-Encoding
Content-Type: application/json; charset=utf-8
{
"ticket": "57f2af23-a94c-426d-8521-11b2e8782020",
"filename": "exampledicom.zip",
"file_cnt": 1,
"size": 4525137
}
"""
_id = kwargs.pop('cid')
permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
filename = kwargs.get('name')
ticket_id = self.get_param('ticket')
ticket = None
if ticket_id is None:
permchecker(noop)('GET', _id=_id)
elif ticket_id != '':
ticket = self._check_ticket(ticket_id, _id, filename)
if not self.origin.get('id'):
self.origin = ticket.get('origin')
analysis_id = kwargs.get('_id')
fileinfo = storage.get_fileinfo(_id, analysis_id, filename)
if fileinfo is None:
error_msg = 'No files on analysis {}'.format(analysis_id)
if filename:
error_msg = 'Could not find file {} on analysis {}'.format(filename, analysis_id)
self.abort(404, error_msg)
if ticket_id == '':
if filename:
total_size = fileinfo[0]['size']
file_cnt = 1
ticket = util.download_ticket(self.request.client_addr, 'file', _id, filename, total_size, origin=self.origin)
else:
targets, total_size, file_cnt = self._prepare_batch(fileinfo)
analysis_label = util.sanitize_string_to_filename(storage.get_analysis(_id, analysis_id).get('label', 'No Label'))
filename = 'analysis_' + analysis_label + '.tar'
ticket = util.download_ticket(self.request.client_addr, 'batch', targets, filename, total_size, origin=self.origin)
return {
'ticket': config.db.downloads.insert_one(ticket).inserted_id,
'size': total_size,
'file_cnt': file_cnt,
'filename': filename
}
else:
if not filename:
if ticket:
self._send_batch(ticket)
else:
self.abort(400, 'batch downloads require a ticket')
elif not fileinfo:
self.abort(404, "{} doesn't exist".format(filename))
else:
fileinfo = fileinfo[0]
filepath = os.path.join(
config.get_item('persistent', 'data_path'),
util.path_from_hash(fileinfo['hash'])
)
filename = fileinfo['name']
# Request for info about zipfile
if self.is_true('info'):
try:
info = FileListHandler.build_zip_info(filepath)
except zipfile.BadZipfile:
self.abort(400, 'not a zip file')
return info
# Request to download zipfile member
elif self.get_param('member') is not None:
zip_member = self.get_param('member')
try:
with zipfile.ZipFile(filepath) as zf:
self.response.headers['Content-Type'] = util.guess_mimetype(zip_member)
self.response.write(zf.open(zip_member).read())
except zipfile.BadZipfile:
self.abort(400, 'not a zip file')
except KeyError:
self.abort(400, 'zip file contains no such member')
# log download if we haven't already for this ticket
if ticket:
if not ticket.get('logged', False):
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
config.db.downloads.update_one({'_id': ticket_id}, {'$set': {'logged': True}})
else:
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
# Request to download the file itself
else:
self.response.app_iter = open(filepath, 'rb')
self.response.headers['Content-Length'] = str(fileinfo['size']) # must be set after setting app_iter
if self.is_true('view'):
self.response.headers['Content-Type'] = str(fileinfo.get('mimetype', 'application/octet-stream'))
else:
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.headers['Content-Disposition'] = 'attachment; filename=' + str(filename)
# log download if we haven't already for this ticket
if ticket:
ticket = config.db.downloads.find_one({'_id': ticket_id})
if not ticket.get('logged', False):
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
config.db.downloads.update_one({'_id': ticket_id}, {'$set': {'logged': True}})
else:
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
@log_access(AccessType.delete_analysis)
def delete(self, cont_name, list_name, **kwargs):
# Overriding base class delete to audit action before completion
_id = kwargs.pop('cid')
permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
permchecker(noop)('DELETE', _id=_id, query_params=kwargs)
self.log_user_access(AccessType.delete_file, cont_name=cont_name, cont_id=_id)
try:
result = keycheck(storage.exec_op)('DELETE', _id, query_params=kwargs)
except APIStorageException as e:
self.abort(400, e.message)
if result.modified_count == 1:
return {'modified': result.modified_count}
else:
self.abort(404, 'Element not removed from list {} in container {} {}'.format(storage.list_name, storage.cont_name, _id))
def _prepare_batch(self, fileinfo):
## duplicated code from download.py
## we need a way to avoid this
targets = []
total_size = total_cnt = 0
data_path = config.get_item('persistent', 'data_path')
for f in fileinfo:
filepath = os.path.join(data_path, util.path_from_hash(f['hash']))
if os.path.exists(filepath): # silently skip missing files
targets.append((filepath, 'analyses/' + f['name'], f['size']))
total_size += f['size']
total_cnt += 1
return targets, total_size, total_cnt
def _send_batch(self, ticket):
self.response.app_iter = download.archivestream(ticket)
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.headers['Content-Disposition'] = 'attachment; filename=' + str(ticket['filename'])
def delete_note(self, cont_name, list_name, **kwargs):
_id = kwargs.pop('cid')
analysis_id = kwargs.pop('_id')
permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
note_id = kwargs.get('note_id')
permchecker(noop)('DELETE', _id=_id)
result = storage.delete_note(_id=_id, analysis_id=analysis_id, note_id=note_id)
if result.modified_count == 1:
return {'modified':result.modified_count}
else:
self.abort(404, 'Element not removed from list {} of container {} {}'.format(storage.list_name, storage.cont_name, _id))
def add_note(self, cont_name, list_name, **kwargs):
_id = kwargs.pop('cid')
analysis_id = kwargs.get('_id')
permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
payload = self.request.json_body
notes_schema_file = list_handler_configurations[cont_name]['notes']['storage_schema_file']
input_schema_uri = validators.schema_uri('input', notes_schema_file)
input_validator = validators.from_schema_path(input_schema_uri)
input_validator(payload, 'POST')
payload['_id'] = str(bson.objectid.ObjectId())
payload['user'] = payload.get('user', self.uid)
payload['created'] = datetime.datetime.utcnow()
permchecker(noop)('POST', _id=_id)
result = storage.add_note(_id=_id, analysis_id=analysis_id, payload=payload)
if result.modified_count == 1:
return {'modified':result.modified_count}
else:
self.abort(404, 'Element not added in list {} of container {} {}'.format(storage.list_name, storage.cont_name, _id))
"""
Module defining RefererHandler and its subclasses. RefererHandler
generalizes the handling of documents that have their own collection
and always have an associated parent container they refer to.
"""
from .. import config
from .. import upload
from ..auth import refererauth, always_ok
from ..dao import containerutil, noop
from ..web import base
from ..web.request import log_access, AccessType
from .collectionshandler import CollectionsHandler
log = config.log
class RefererHandler(base.RequestHandler):
container_storages = {
cont_name: handler_config['storage']
for cont_name, handler_config in CollectionsHandler.container_handler_configurations.iteritems()}
referer_handler_configurations = {
'analyses': {
'storage':
'storage_schema_file': 'analysis.json',
'payload_schema_file': 'analysis.json',
'permchecker': refererauth.default_referer,
},
}
@classmethod
def _get_container(cls, cont_name, _id):
cont_name = containerutil.pluralize(cont_name)
storage = cls.container_storages[cont_name]
return storage.get_container(_id)
def _get_permchecker(self, container):
if self.superuser_request:
return always_ok
elif self.public_request:
return refererauth.public_request(self, container)
else:
permchecker = self.config['permchecker']
return permchecker(self, container)
class AnalysesHandler(RefererHandler):
def post(self, cont_name, cid, **kwargs):
"""
Default behavior:
Creates an analysis object and uploads supplied input
and output files.
When param ``job`` is true:
Creates an analysis object and job object that reference
each other via ``job`` and ``destination`` fields. Job based
analyses are only allowed at the session level.
"""
container = self._get_container(cont_name, cid)
permchecker = self._get_permchecker(container=container)
permchecker(noop)('POST', container)
if self.is_true('job'):
if cont_name == 'sessions':
payload = self.request.json_body
payload_validator(payload.get('analysis',{}), 'POST')
analysis = payload.get('analysis')
job = payload.get('job')
if job is None or analysis is None:
self.abort(400, 'JSON body must contain map for "analysis" and "job"')
result = storage.create_job_and_analysis(cont_name, _id, analysis, job, self.origin)
return {'_id': result['analysis']['_id']}
else:
self.abort(400, 'Analysis created via a job must be at the session level')
# _id = kwargs.pop('cid')
# permchecker, storage, _, payload_validator, _ = self._initialize_request(cont_name, list_name, _id)
# permchecker(noop)('POST', _id=_id)
payload = upload.process_upload(self.request, upload.Strategy.analysis, origin=self.origin)
analysis = storage.default_analysis(self.origin)
analysis.update(payload)
result = storage.exec_op('POST', _id=_id, payload=analysis)
if result.modified_count == 1:
return {'_id': analysis['_id']}
else:
self.abort(500, 'Element not added in list analyses of container {} {}'.format(cont_name, _id))
def _get_parent_container(self, payload):
if not self.config.get('parent_storage'):
return None, None
parent_storage = self.config['parent_storage']
parent_id_property = parent_storage.cont_name[:-1]
parent_id = payload.get(parent_id_property)
if parent_id:
parent_storage.dbc = config.db[parent_storage.cont_name]
parent_container = parent_storage.get_container(parent_id)
if parent_container is None:
self.abort(404, 'Element {} not found in container {}'.format(parent_id, parent_storage.cont_name))
else:
parent_container = None
return parent_container, parent_id_property
@log_access(AccessType.delete_analysis)
def delete(self, **kwargs):
# Overriding base class delete to audit action before completion
_id = kwargs.pop('cid')
permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
permchecker(noop)('DELETE', _id=_id, query_params=kwargs)
self.log_user_access(AccessType.delete_file, cont_name=cont_name, cont_id=_id)
try:
result = keycheck(storage.exec_op)('DELETE', _id, query_params=kwargs)
except APIStorageException as e:
self.abort(400, e.message)
if result.modified_count == 1:
return {'modified': result.modified_count}
else:
self.abort(404, 'Element not removed from list {} in container {} {}'.format(storage.list_name, storage.cont_name, _id))
def add_note(self, cont_name, cid, child_name, _id):
_id = kwargs.pop('cid')
analysis_id = kwargs.get('_id')
permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
payload = self.request.json_body
notes_schema_file = list_handler_configurations[cont_name]['notes']['storage_schema_file']
input_schema_uri = validators.schema_uri('input', notes_schema_file)
input_validator = validators.from_schema_path(input_schema_uri)
input_validator(payload, 'POST')
payload['_id'] = str(bson.objectid.ObjectId())
payload['user'] = payload.get('user', self.uid)
payload['created'] = datetime.datetime.utcnow()
permchecker(noop)('POST', _id=_id)
result = storage.add_note(_id=_id, analysis_id=analysis_id, payload=payload)
if result.modified_count == 1:
return {'modified':result.modified_count}
else:
self.abort(404, 'Element not added in list {} of container {} {}'.format(storage.list_name, storage.cont_name, _id))
def delete_note(self, cont_name, list_name, **kwargs):
_id = kwargs.pop('cid')
analysis_id = kwargs.pop('_id')
permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
note_id = kwargs.get('note_id')
permchecker(noop)('DELETE', _id=_id)
result = storage.delete_note(_id=_id, analysis_id=analysis_id, note_id=note_id)
if result.modified_count == 1:
return {'modified': result.modified_count}
else:
self.abort(404, 'Note not removed from analysis {}'.format(analysis_id))
def download(self, **kwargs):
"""
.. http:get:: /api/(cont_name)/(cid)/analyses/(analysis_id)/files/(file_name)
Download a file from an analysis or download a tar of all files
When no filename is provided, a tar of all input and output files is created.
The first request to this endpoint without a ticket ID generates a download ticket.
A request to this endpoint with a ticket ID downloads the file(s).
If the analysis object is tied to a job, the input file(s) are inflated from
the job's ``input`` array.
:param cont_name: one of ``projects``, ``sessions``, ``collections``
:type cont_name: string
:param cid: Container ID
:type cid: string
:param analysis_id: Analysis ID
:type analysis_id: string
:param filename: (Optional) Filename of specific file to download
:type filename: string
:query string ticket: Download ticket ID
:statuscode 200: no error
:statuscode 404: No files on analysis ``analysis_id``
:statuscode 404: Could not find file ``filename`` on analysis ``analysis_id``
**Example request without ticket ID**:
.. sourcecode:: http
GET /api/sessions/57081d06b386a6dc79ca383c/analyses/5751cd3781460100a66405c8/files HTTP/1.1
Host: demo.flywheel.io
Accept: */*
**Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept-Encoding
Content-Type: application/json; charset=utf-8
{
"ticket": "57f2af23-a94c-426d-8521-11b2e8782020",
"filename": "analysis_5751cd3781460100a66405c8.tar",
"file_cnt": 3,
"size": 4525137
}
**Example request with ticket ID**:
.. sourcecode:: http
GET /api/sessions/57081d06b386a6dc79ca383c/analyses/5751cd3781460100a66405c8/files?ticket=57f2af23-a94c-426d-8521-11b2e8782020 HTTP/1.1
Host: demo.flywheel.io
Accept: */*
**Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept-Encoding
Content-Type: application/octet-stream
Content-Disposition: attachment; filename=analysis_5751cd3781460100a66405c8.tar;
**Example Request with filename**:
.. sourcecode:: http
GET /api/sessions/57081d06b386a6dc79ca383c/analyses/5751cd3781460100a66405c8/files/exampledicom.zip?ticket= HTTP/1.1
Host: demo.flywheel.io
Accept: */*
**Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept-Encoding
Content-Type: application/json; charset=utf-8
{
"ticket": "57f2af23-a94c-426d-8521-11b2e8782020",
"filename": "exampledicom.zip",
"file_cnt": 1,
"size": 4525137
}
"""
_id = kwargs.pop('cid')
permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
filename = kwargs.get('name')
ticket_id = self.get_param('ticket')
ticket = None
if ticket_id is None:
permchecker(noop)('GET', _id=_id)
elif ticket_id != '':
ticket = self._check_ticket(ticket_id, _id, filename)
if not self.origin.get('id'):
self.origin = ticket.get('origin')
analysis_id = kwargs.get('_id')
fileinfo = storage.get_fileinfo(_id, analysis_id, filename)
if fileinfo is None:
error_msg = 'No files on analysis {}'.format(analysis_id)
if filename:
error_msg = 'Could not find file {} on analysis {}'.format(filename, analysis_id)
self.abort(404, error_msg)
if ticket_id == '':
if filename:
total_size = fileinfo[0]['size']
file_cnt = 1
ticket = util.download_ticket(self.request.client_addr, 'file', _id, filename, total_size, origin=self.origin)
else:
targets, total_size, file_cnt = self._prepare_batch(fileinfo)
label = util.sanitize_string_to_filename(self.storage.get_container(_id).get('label', 'No Label'))
filename = 'analysis_' + label + '.tar'
ticket = util.download_ticket(self.request.client_addr, 'batch', targets, filename, total_size, origin=self.origin)
return {
'ticket': config.db.downloads.insert_one(ticket).inserted_id,
'size': total_size,
'file_cnt': file_cnt,
'filename': filename
}
else:
if not filename:
if ticket:
self._send_batch(ticket)
else:
self.abort(400, 'batch downloads require a ticket')
elif not fileinfo:
self.abort(404, "{} doesn't exist".format(filename))
else:
fileinfo = fileinfo[0]
filepath = os.path.join(
config.get_item('persistent', 'data_path'),
util.path_from_hash(fileinfo['hash'])
)
filename = fileinfo['name']
# Request for info about zipfile
if self.is_true('info'):
try:
info = FileListHandler.build_zip_info(filepath)
except zipfile.BadZipfile:
self.abort(400, 'not a zip file')
return info
# Request to download zipfile member
elif self.get_param('member') is not None:
zip_member = self.get_param('member')
try:
with zipfile.ZipFile(filepath) as zf:
self.response.headers['Content-Type'] = util.guess_mimetype(zip_member)
self.response.write(zf.open(zip_member).read())
except zipfile.BadZipfile:
self.abort(400, 'not a zip file')
except KeyError:
self.abort(400, 'zip file contains no such member')
# log download if we haven't already for this ticket
if ticket:
if not ticket.get('logged', False):
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
config.db.downloads.update_one({'_id': ticket_id}, {'$set': {'logged': True}})
else:
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
# Request to download the file itself
else:
self.response.app_iter = open(filepath, 'rb')
self.response.headers['Content-Length'] = str(fileinfo['size']) # must be set after setting app_iter
if self.is_true('view'):
self.response.headers['Content-Type'] = str(fileinfo.get('mimetype', 'application/octet-stream'))
else:
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.headers['Content-Disposition'] = 'attachment; filename=' + str(filename)
# log download if we haven't already for this ticket
if ticket:
ticket = config.db.downloads.find_one({'_id': ticket_id})
if not ticket.get('logged', False):
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
config.db.downloads.update_one({'_id': ticket_id}, {'$set': {'logged': True}})
else:
self.log_user_access(AccessType.download_file, cont_name=cont_name, cont_id=_id)
def _check_ticket(self, ticket_id, _id, filename):
ticket = config.db.downloads.find_one({'_id': ticket_id})
if not ticket:
self.abort(404, 'no such ticket')
if ticket['ip'] != self.request.client_addr:
self.abort(400, 'ticket not for this source IP')
if not filename:
return self._check_ticket_for_batch(ticket)
if ticket.get('filename') != filename or ticket['target'] != _id:
self.abort(400, 'ticket not for this resource')
return ticket
def _check_ticket_for_batch(self, ticket):
if ticket.get('type') != 'batch':
self.abort(400, 'ticket not for this resource')
return ticket
def _prepare_batch(self, fileinfo):
## duplicated code from download.py
## we need a way to avoid this
targets = []
total_size = total_cnt = 0
data_path = config.get_item('persistent', 'data_path')
for f in fileinfo:
filepath = os.path.join(data_path, util.path_from_hash(f['hash']))
if os.path.exists(filepath): # silently skip missing files
targets.append((filepath, 'analyses/' + f['name'], f['size']))
total_size += f['size']
total_cnt += 1
return targets, total_size, total_cnt
def _send_batch(self, ticket):
self.response.app_iter = download.archivestream(ticket)
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.headers['Content-Disposition'] = 'attachment; filename=' + str(ticket['filename'])
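From the client side, the ticketed download flow documented in the docstring above looks roughly like the sketch below (using the requests library; the host and IDs are the placeholder values from the docstring, and authentication is omitted):

import requests

API = 'https://demo.flywheel.io/api'                      # assumed base URL
url = '{}/sessions/{}/analyses/{}/files'.format(
    API, '57081d06b386a6dc79ca383c', '5751cd3781460100a66405c8')

# Step 1: pass an empty ticket parameter so the server creates a download ticket
# and replies with its id, the tar filename, the file count and the total size.
info = requests.get(url, params={'ticket': ''}).json()

# Step 2: redeem the ticket (from the same source IP) to stream the tarball.
resp = requests.get(url, params={'ticket': info['ticket']}, stream=True)
with open(info['filename'], 'wb') as fp:
    for chunk in resp.iter_content(chunk_size=2 ** 20):
        fp.write(chunk)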
@@ -1121,7 +1121,6 @@ def upgrade_to_32_closure(cont, cont_name):
     for analysis in cont['analyses']:
         analysis['_id'] = bson.ObjectId(analysis['_id'])
         analysis['parent'] = {'type': cont_type, 'id': cont['_id']}
-        analysis['permissions'] = cont_type['permissions'] = cont['permissions']
     config.db['analyses'].insert_many(cont['analyses'])
     config.db[cont_name].update_one(
         {'_id': cont['_id']},
......
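After this migration step, an analysis document in the new analyses collection carries a parent reference instead of its own copy of the permissions; its shape is roughly the sketch below (field values are illustrative), with ContainerReference.get() above filling in 'permissions' from the parent container at read time.

from bson import ObjectId

# Illustrative shape of one migrated document in the 'analyses' collection
# (only '_id' and 'parent' are asserted by the migration above; the rest is made up).
migrated_analysis = {
    '_id': ObjectId('5751cd3781460100a66405c8'),
    'label': 'my analysis',
    'parent': {'type': 'session', 'id': ObjectId('57081d06b386a6dc79ca383c')},
    # no 'permissions' key any more -- access is resolved through the parent's permissions
}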
@@ -4,6 +4,13 @@
     "type": "object",
     "properties": {
         "_id": {"type": "string"},
+        "parent": {
+            "type": "object",
+            "properties": {
+                "type": {"type": "string"},
+                "id": {"type": "string"}
+            }
+        },
         "created": {},
         "modified": {},
         "notes": {"type": "array", "items": {"$ref": "note.json"}},
......