Skip to content
Snippets Groups Projects
Commit 50d84afd authored by Ambrus Simon's avatar Ambrus Simon
Browse files

fix imports/typos, reroute analysis/notes, start reworking AnalysisHandler.post

parent 06bcd0b2
No related branches found
No related tags found
No related merge requests found
...@@ -248,13 +248,11 @@ endpoints = [ ...@@ -248,13 +248,11 @@ endpoints = [
route('/<list_name:files>/<name:{fname}>', FileListHandler, m=['GET', 'DELETE']), route('/<list_name:files>/<name:{fname}>', FileListHandler, m=['GET', 'DELETE']),
route('/<list_name:files>/<name:{fname}>/info', FileListHandler, h='get_info', m=['GET']), route('/<list_name:files>/<name:{fname}>/info', FileListHandler, h='get_info', m=['GET']),
route( '/<child_name:analyses>', AnalysesHandler, m=['POST']), route( '/analyses>', AnalysesHandler, m=['POST']),
prefix('/<child_name:analyses>', [ prefix('/analyses>', [
route('/<_id:{cid}>', AnalysesHandler, m=['GET', 'DELETE']), route('/<_id:{cid}>', AnalysesHandler, m=['GET', 'DELETE']),
route('/<_id:{cid}>/files', AnalysesHandler, h='download', m=['GET']), route('/<_id:{cid}>/files', AnalysesHandler, h='download', m=['GET']),
route('/<_id:{cid}>/files/<name:{fname}>', AnalysesHandler, h='download', m=['GET']), route('/<_id:{cid}>/files/<name:{fname}>', AnalysesHandler, h='download', m=['GET']),
route('/<_id:{cid}>/notes', AnalysesHandler, h='add_note', m=['POST']),
route('/<_id:{cid}>/notes/<note_id:{cid}>', AnalysesHandler, h='delete_note', m=['DELETE']),
]), ]),
route('/<list_name:notes>', NotesListHandler, m=['POST']), route('/<list_name:notes>', NotesListHandler, m=['POST']),
...@@ -263,6 +261,14 @@ endpoints = [ ...@@ -263,6 +261,14 @@ endpoints = [
]), ]),
# Analysis notes
prefix('/{cname}/{cid}/<cont_name:analyses>/<cid:{cid}>', [
route('/<list_name:notes>', NotesListHandler, m=['POST']),
route('/<list_name:notes>/<_id:{nid}>', NotesListHandler, name='notes', m=['GET', 'PUT', 'DELETE']),
]),
# Misc (to be cleaned up later) # Misc (to be cleaned up later)
route('/<par_cont_name:groups>/<par_id:{gid}>/<cont_name:projects>', ContainerHandler, h='get_all', m=['GET']), route('/<par_cont_name:groups>/<par_id:{gid}>/<cont_name:projects>', ContainerHandler, h='get_all', m=['GET']),
......
...@@ -85,7 +85,7 @@ def collection_permissions(handler, container=None, _=None): ...@@ -85,7 +85,7 @@ def collection_permissions(handler, container=None, _=None):
return g return g
def analysis_permissions(handler, container=None, _=None): def default_referer(handler, parent_container=None):
def g(exec_op): def g(exec_op):
def f(method, _id=None, payload=None): def f(method, _id=None, payload=None):
access = _get_access(handler.uid, handler.user_site, parent_container) access = _get_access(handler.uid, handler.user_site, parent_container)
...@@ -99,7 +99,7 @@ def analysis_permissions(handler, container=None, _=None): ...@@ -99,7 +99,7 @@ def analysis_permissions(handler, container=None, _=None):
has_access = False has_access = False
if has_access: if has_access:
return exec_op(method, _id=None, payload=None) return exec_op(method, _id=_id, payload=payload)
else: else:
handler.abort(403, 'user not authorized to perform a {} operation on parent container'.format(method)) handler.abort(403, 'user not authorized to perform a {} operation on parent container'.format(method))
return f return f
......
import datetime
import bson.errors import bson.errors
import bson.objectid import bson.objectid
import pymongo.errors import pymongo.errors
from .. import util
from .. import config
from . import consistencychecker
from . import APIStorageException, APIConflictException, APINotFoundException from . import APIStorageException, APIConflictException, APINotFoundException
from . import consistencychecker
from . import containerutil
from . import hierarchy from . import hierarchy
from .. import config
from .. import util
log = config.log log = config.log
...@@ -435,7 +439,7 @@ class AcquisitionStorage(ContainerStorage): ...@@ -435,7 +439,7 @@ class AcquisitionStorage(ContainerStorage):
class AnalysisStorage(ContainerStorage): class AnalysisStorage(ContainerStorage):
def __init__(self): def __init__(self):
super(AcquisitionStorage, self).__init__('analyses', use_object_id=True) super(AnalysisStorage, self).__init__('analyses', use_object_id=True)
def get_parent(self, parent_type, parent_id): def get_parent(self, parent_type, parent_id):
...@@ -472,6 +476,10 @@ class AnalysisStorage(ContainerStorage): ...@@ -472,6 +476,10 @@ class AnalysisStorage(ContainerStorage):
Create and insert job and analysis. Create and insert job and analysis.
""" """
# TODO fix import cycle - separate analysisstorage module?
from ..jobs.gears import validate_gear_config, get_gear
from ..jobs.jobs import Job
cid = bson.objectid.ObjectId(cid) cid = bson.objectid.ObjectId(cid)
default = self.default_analysis(origin) default = self.default_analysis(origin)
...@@ -483,10 +491,10 @@ class AnalysisStorage(ContainerStorage): ...@@ -483,10 +491,10 @@ class AnalysisStorage(ContainerStorage):
files = [] # For Analysis object (list of file objects) files = [] # For Analysis object (list of file objects)
for x in job['inputs'].keys(): for x in job['inputs'].keys():
input_map = job['inputs'][x] input_map = job['inputs'][x]
fileref = create_filereference_from_dictionary(input_map) fileref = containerutil.create_filereference_from_dictionary(input_map)
inputs[x] = fileref inputs[x] = fileref
contref = create_containerreference_from_filereference(fileref) contref = containerutil.create_containerreference_from_filereference(fileref)
file_ = contref.find_file(fileref.name) file_ = contref.find_file(fileref.name)
if file_: if file_:
file_.pop('output', None) # If file was from an analysis file_.pop('output', None) # If file was from an analysis
...@@ -511,7 +519,7 @@ class AnalysisStorage(ContainerStorage): ...@@ -511,7 +519,7 @@ class AnalysisStorage(ContainerStorage):
raise APIConflictException('Gear marked as invalid, will not run!') raise APIConflictException('Gear marked as invalid, will not run!')
validate_gear_config(gear, job.get('config')) validate_gear_config(gear, job.get('config'))
destination = create_containerreference_from_dictionary({'type': 'analysis', 'id': analysis['_id']}) destination = containerutil.create_containerreference_from_dictionary({'type': 'analysis', 'id': analysis['_id']})
job = Job(gear_id, inputs, destination=destination, tags=tags, config_=job.get('config'), origin=origin) job = Job(gear_id, inputs, destination=destination, tags=tags, config_=job.get('config'), origin=origin)
job_id = job.insert() job_id = job.insert()
...@@ -534,6 +542,9 @@ class AnalysisStorage(ContainerStorage): ...@@ -534,6 +542,9 @@ class AnalysisStorage(ContainerStorage):
Update analysis if new job is found Update analysis if new job is found
""" """
# TODO fix import cycle - separate analysisstorage module?
from ..jobs.jobs import Job
if analysis.get('job') is None: if analysis.get('job') is None:
return analysis return analysis
try: try:
......
...@@ -11,7 +11,7 @@ SINGULAR_TO_PLURAL = { ...@@ -11,7 +11,7 @@ SINGULAR_TO_PLURAL = {
'acquisition': 'acquisitions', 'acquisition': 'acquisitions',
'analysis': 'analyses', 'analysis': 'analyses',
} }
PLURAL_TO_SINGULAR = {p: s for s, p in singular_to_plural.iteritems()} PLURAL_TO_SINGULAR = {p: s for s, p in SINGULAR_TO_PLURAL.iteritems()}
def get_perm(name): def get_perm(name):
...@@ -115,7 +115,7 @@ class ContainerReference(object): ...@@ -115,7 +115,7 @@ class ContainerReference(object):
raise Exception('Container id must be of type str') raise Exception('Container id must be of type str')
self.type = type self.type = type
self.collection = singular_to_plural[type] self.collection = pluralize(type)
self.id = id self.id = id
@classmethod @classmethod
...@@ -137,7 +137,7 @@ class ContainerReference(object): ...@@ -137,7 +137,7 @@ class ContainerReference(object):
if result is None: if result is None:
raise Exception('No such {} {} in database'.format(self.type, self.id)) raise Exception('No such {} {} in database'.format(self.type, self.id))
if 'parent' in result: if 'parent' in result:
parent_collection = singular_to_plural[result['parent']['type']] parent_collection = pluralize(result['parent']['type'])
parent = config.db[parent_collection].find_one({'_id': bson.ObjectId(result['parent']['id'])}) parent = config.db[parent_collection].find_one({'_id': bson.ObjectId(result['parent']['id'])})
if parent is None: if parent is None:
raise Exception('Cannot find parent {} {} of {} {}'.format( raise Exception('Cannot find parent {} {} of {} {}'.format(
...@@ -155,7 +155,7 @@ class ContainerReference(object): ...@@ -155,7 +155,7 @@ class ContainerReference(object):
def file_uri(self, filename): def file_uri(self, filename):
cont = self.get() cont = self.get()
if 'parent' in cont: if 'parent' in cont:
par_coll, par_id = singular_to_plural[cont['parent']['type']], cont['parent']['id'] par_coll, par_id = pluralize(cont['parent']['type']), cont['parent']['id']
return '/{}/{}/{}/{}/files/{}'.format(par_coll, par_id, self.collection, self.id, filename) return '/{}/{}/{}/{}/files/{}'.format(par_coll, par_id, self.collection, self.id, filename)
return '/{}/{}/files/{}'.format(self.collection, self.id, filename) return '/{}/{}/files/{}'.format(self.collection, self.id, filename)
......
...@@ -2,13 +2,10 @@ import bson.errors ...@@ -2,13 +2,10 @@ import bson.errors
import bson.objectid import bson.objectid
import datetime import datetime
from .. import config
from . import consistencychecker, containerutil
from . import APIStorageException, APIConflictException from . import APIStorageException, APIConflictException
from . import consistencychecker
from .. import config
from .containerstorage import SessionStorage, AcquisitionStorage from .containerstorage import SessionStorage, AcquisitionStorage
from .containerutil import create_filereference_from_dictionary, create_containerreference_from_dictionary, create_containerreference_from_filereference
from ..jobs.jobs import Job
from ..jobs.gears import validate_gear_config, get_gear
log = config.log log = config.log
......
...@@ -2,17 +2,17 @@ import bson ...@@ -2,17 +2,17 @@ import bson
import datetime import datetime
import dateutil import dateutil
from ..web import base
from .. import util
from .. import config from .. import config
from .. import util
from .. import validators from .. import validators
from ..auth import containerauth, always_ok from ..auth import containerauth, always_ok
from ..dao import APIStorageException, containerstorage, containerutil, noop from ..dao import APIStorageException, containerstorage, containerutil, noop
from ..dao.liststorage import AnalysesStorage from ..dao.containerstorage import AnalysisStorage
from ..types import Origin
from ..jobs.queue import Queue
from ..jobs.jobs import Job
from ..jobs.gears import get_gear from ..jobs.gears import get_gear
from ..jobs.jobs import Job
from ..jobs.queue import Queue
from ..types import Origin
from ..web import base
from ..web.request import log_access, AccessType from ..web.request import log_access, AccessType
log = config.log log = config.log
...@@ -208,7 +208,7 @@ class ContainerHandler(base.RequestHandler): ...@@ -208,7 +208,7 @@ class ContainerHandler(base.RequestHandler):
Given an object with an `analyses` array key, inflate job info for job-based analyses Given an object with an `analyses` array key, inflate job info for job-based analyses
""" """
for analysis in result.get('analyses', []): for analysis in result.get('analyses', []):
AnalysesStorage.inflate_job_info(analysis) AnalysisStorage.inflate_job_info(analysis)
return result return result
def _filter_permissions(self, result, uid, site): def _filter_permissions(self, result, uid, site):
......
...@@ -10,7 +10,6 @@ import zipfile ...@@ -10,7 +10,6 @@ import zipfile
from ..web import base from ..web import base
from .. import config from .. import config
from .. import upload from .. import upload
from .. import download
from .. import util from .. import util
from .. import validators from .. import validators
from ..auth import listauth, always_ok from ..auth import listauth, always_ok
...@@ -81,7 +80,10 @@ def initialize_list_configurations(): ...@@ -81,7 +80,10 @@ def initialize_list_configurations():
'projects': copy.deepcopy(container_default_configurations), 'projects': copy.deepcopy(container_default_configurations),
'sessions': copy.deepcopy(container_default_configurations), 'sessions': copy.deepcopy(container_default_configurations),
'acquisitions': copy.deepcopy(container_default_configurations), 'acquisitions': copy.deepcopy(container_default_configurations),
'collections': copy.deepcopy(container_default_configurations) 'collections': copy.deepcopy(container_default_configurations),
'analyses': {
'notes': copy.deepcopy(container_default_configurations['notes']),
},
} }
# preload the Storage instances for all configurations # preload the Storage instances for all configurations
for cont_name, cont_config in list_container_configurations.iteritems(): for cont_name, cont_config in list_container_configurations.iteritems():
......
...@@ -6,9 +6,16 @@ and are stored in their own collection instead of an embedded list on the ...@@ -6,9 +6,16 @@ and are stored in their own collection instead of an embedded list on the
container (eg. ListHandler) container (eg. ListHandler)
""" """
import os
import zipfile
from abc import ABCMeta, abstractproperty
from .. import config from .. import config
from .. import download
from .. import upload from .. import upload
from ..auth import refererauth, always_ok from .. import util
from .. import validators
from ..auth import containerauth, always_ok
from ..dao import containerstorage, noop from ..dao import containerstorage, noop
from ..web import base from ..web import base
from ..web.request import log_access, AccessType from ..web.request import log_access, AccessType
...@@ -18,30 +25,40 @@ log = config.log ...@@ -18,30 +25,40 @@ log = config.log
class RefererHandler(base.RequestHandler): class RefererHandler(base.RequestHandler):
def __init__() __metaclass__ = ABCMeta
referer_handler_configurations = { storage = abstractproperty()
'analyses': { storage_schema_file = abstractproperty()
'storage': payload_schema_file = abstractproperty()
'storage_schema_file': 'analysis.json', permchecker = containerauth.default_referer
'payload_schema_file': 'analysis.json',
'permchecker': refererauth.default_referer, @property
}, def mongo_validator(self):
} mongo_schema_uri = validators.schema_uri('mongo', self.storage_schema_file)
mongo_validator = validators.decorator_from_schema_path(mongo_schema_uri)
return mongo_validator
@property
def input_validator(self):
input_schema_uri = validators.schema_uri('input', self.payload_schema_file)
input_validator = validators.from_schema_path(input_schema_uri)
return input_validator
def _get_permchecker(self, container): def get_permchecker(self, parent_container):
if self.superuser_request: if self.superuser_request:
return always_ok return always_ok
elif self.public_request: elif self.public_request:
return refererauth.public_request(self, container) return containerauth.public_request(self, container=parent_container)
else: else:
permchecker = self.config['permchecker'] return self.permchecker(self, parent_container=parent_container)
return permchecker(self, container)
class AnalysesHandler(RefererHandler): class AnalysesHandler(RefererHandler):
def post(self, cont_name, cid, **kwargs): storage = containerstorage.AnalysisStorage()
storage_schema_file = 'analysis.json'
payload_schema_file = 'analysis.json'
def post(self, cont_name, cid):
""" """
Default behavior: Default behavior:
Creates an analysis object and uploads supplied input Creates an analysis object and uploads supplied input
...@@ -52,51 +69,31 @@ class AnalysesHandler(RefererHandler): ...@@ -52,51 +69,31 @@ class AnalysesHandler(RefererHandler):
analyses are only allowed at the session level. analyses are only allowed at the session level.
""" """
parent = self.storage.get_parent(cont_name, cid) parent = self.storage.get_parent(cont_name, cid)
permchecker = self._get_permchecker(container=container) permchecker = self._get_permchecker(parent)
permchecker(noop)('POST', container) permchecker(noop)('POST')
if self.is_true('job'): if self.is_true('job'):
if cont_name == 'sessions': if cont_name == 'sessions':
payload = self.request.json_body payload = self.request.json_body
payload_validator(payload.get('analysis',{}), 'POST') self.input_validator(payload.get('analysis', {}), 'POST')
analysis = payload.get('analysis') analysis = payload.get('analysis')
job = payload.get('job') job = payload.get('job')
if job is None or analysis is None: if job is None or analysis is None:
self.abort(400, 'JSON body must contain map for "analysis" and "job"') self.abort(400, 'JSON body must contain map for "analysis" and "job"')
result = self.storage.create_job_and_analysis(cont_name, _id, analysis, job, self.origin) result = self.storage.create_job_and_analysis(cont_name, cid, analysis, job, self.origin)
return {'_id': result['analysis']['_id']} return {'_id': result['analysis']['_id']}
else: else:
self.abort(400, 'Analysis created via a job must be at the session level') self.abort(400, 'Analysis created via a job must be at the session level')
# _id = kwargs.pop('cid')
# permchecker, storage, _, payload_validator, _ = self._initialize_request(cont_name, list_name, _id)
# permchecker(noop)('POST', _id=_id)
payload = upload.process_upload(self.request, upload.Strategy.analysis, origin=self.origin) payload = upload.process_upload(self.request, upload.Strategy.analysis, origin=self.origin)
analysis = self.storage.default_analysis(self.origin) analysis = self.storage.default_analysis(self.origin)
analysis.update(payload) analysis.update(payload)
result = self.storage.exec_op('POST', _id=_id, payload=analysis) result = self.storage.exec_op('POST', payload=analysis)
if result.modified_count == 1: if result.acknowledged:
return {'_id': analysis['_id']} return {'_id': result.inserted_id}
else:
self.abort(500, 'Element not added in list analyses of container {} {}'.format(cont_name, _id))
def _get_parent_container(self, payload):
if not self.config.get('parent_storage'):
return None, None
parent_storage = self.config['parent_storage']
parent_id_property = parent_storage.cont_name[:-1]
parent_id = payload.get(parent_id_property)
if parent_id:
parent_storage.dbc = config.db[parent_storage.cont_name]
parent_container = parent_storage.get_container(parent_id)
if parent_container is None:
self.abort(404, 'Element {} not found in container {}'.format(parent_id, parent_storage.cont_name))
else: else:
parent_container = None self.abort(500, 'Analysis not added for container {} {}'.format(cont_name, cid))
return parent_container, parent_id_property
@log_access(AccessType.delete_analysis) @log_access(AccessType.delete_analysis)
......
...@@ -6,9 +6,8 @@ import datetime ...@@ -6,9 +6,8 @@ import datetime
from .. import config from .. import config
from ..dao import APINotFoundException, APIStorageException from ..dao import APINotFoundException, APIStorageException
from ..dao.containerstorage import AcquisitionStorage from ..dao.containerstorage import AcquisitionStorage, AnalysisStorage
from ..dao.containerutil import create_filereference_from_dictionary, create_containerreference_from_filereference from ..dao.containerutil import create_filereference_from_dictionary, create_containerreference_from_filereference
from ..dao.liststorage import AnalysesStorage
from .jobs import Job from .jobs import Job
from .queue import Queue from .queue import Queue
from . import gears from . import gears
...@@ -149,7 +148,7 @@ def run(batch_job): ...@@ -149,7 +148,7 @@ def run(batch_job):
if not analysis.get('label'): if not analysis.get('label'):
time_now = datetime.datetime.utcnow() time_now = datetime.datetime.utcnow()
analysis['label'] = {'label': '{} {}'.format(gear_name, time_now)} analysis['label'] = {'label': '{} {}'.format(gear_name, time_now)}
an_storage = AnalysesStorage('sessions', 'analyses', use_object_id=True) an_storage = AnalysisStorage()
acq_storage = AcquisitionStorage() acq_storage = AcquisitionStorage()
jobs = [] jobs = []
......
0% Loading — or try again.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment