From 1e9b76105c0faaa8f95a4e89e95cc2c765a01232 Mon Sep 17 00:00:00 2001
From: Megan Henning <meganhenning@flywheel.io>
Date: Thu, 21 Jul 2016 17:48:24 -0500
Subject: [PATCH] Fix most pylint warnings

---
 api/api.py                         |  2 +-
 api/base.py                        |  8 ++--
 api/centralclient.py               | 28 +++----------
 api/dao/liststorage.py             |  2 +-
 api/handlers/collectionshandler.py | 50 +++++++++++------------
 api/handlers/grouphandler.py       | 12 +-----
 api/handlers/listhandler.py        | 65 +++++++++++++-----------------
 api/handlers/reporthandler.py      |  8 ++--
 api/handlers/schemahandler.py      |  3 +-
 api/handlers/searchhandler.py      |  5 ++-
 api/handlers/userhandler.py        | 20 +++------
 api/jobs/jobs.py                   |  2 +-
 api/jobs/queue.py                  |  4 +-
 api/placer.py                      |  6 +--
 api/search/queryprocessor.py       |  3 +-
 15 files changed, 85 insertions(+), 133 deletions(-)

diff --git a/api/api.py b/api/api.py
index f18db054..b6d92dd6 100644
--- a/api/api.py
+++ b/api/api.py
@@ -169,7 +169,7 @@ routing_regexes = {
     'note_id_re': '[0-9a-f]{24}',
     # schema regex
     # example: schema_path/schema.json
-    'schema_re': '[^/.]{3,60}/[^/.]{3,60}\.json'
+    'schema_re': r'[^/.]{3,60}/[^/.]{3,60}\.json'
 }
 
 def _format(route):
diff --git a/api/base.py b/api/base.py
index 1e2a01ce..0cb67415 100644
--- a/api/base.py
+++ b/api/base.py
@@ -132,10 +132,10 @@ class RequestHandler(webapp2.RequestHandler):
 
         if cached_token:
             uid = cached_token['uid']
-            log.debug('looked up cached token in %dms' % ((datetime.datetime.utcnow() - timestamp).total_seconds() * 1000.))
+            log.debug('looked up cached token in %dms', ((datetime.datetime.utcnow() - timestamp).total_seconds() * 1000.))
         else:
             uid = self.validate_oauth_token(access_token, timestamp)
-            log.debug('looked up remote token in %dms' % ((datetime.datetime.utcnow() - timestamp).total_seconds() * 1000.))
+            log.debug('looked up remote token in %dms', ((datetime.datetime.utcnow() - timestamp).total_seconds() * 1000.))
 
             # Cache the token for future requests
             config.db.authtokens.replace_one({'_id': access_token}, {'uid': uid, 'timestamp': timestamp}, upsert=True)
@@ -296,7 +296,7 @@ class RequestHandler(webapp2.RequestHandler):
         site_id = config.get_item('site', 'id')
         target_site = self.get_param('site', site_id)
         if target_site == site_id:
-            log.debug('from %s %s %s %s %s' % (self.source_site, self.uid, self.request.method, self.request.path, str(self.request.GET.mixed())))
+            log.debug('from %s %s %s %s %s', self.source_site, self.uid, self.request.method, self.request.path, str(self.request.GET.mixed()))
             return super(RequestHandler, self).dispatch()
         else:
             if not site_id:
@@ -318,7 +318,7 @@ class RequestHandler(webapp2.RequestHandler):
             params = self.request.GET.mixed()
             if 'user' in params: del params['user']
             del params['site']
-            log.debug(' for %s %s %s %s %s' % (target_site, self.uid, self.request.method, self.request.path, str(self.request.GET.mixed())))
+            log.debug(' for %s %s %s %s %s', target_site, self.uid, self.request.method, self.request.path, str(self.request.GET.mixed()))
             target_uri = target['api_uri'] + self.request.path.split('/api')[1]
             r = requests.request(
                 self.request.method,
diff --git a/api/centralclient.py b/api/centralclient.py
index 711289fd..5c2294a5 100644
--- a/api/centralclient.py
+++ b/api/centralclient.py
@@ -44,8 +44,8 @@ def update(db, api_uri, site_name, site_id, ssl_cert, central_url):
         response = (json.loads(r.content))
         sites = response.get('sites')
         users = response.get('users')
-        log.debug('recieved sites: %s ' % ', '.join(s['_id'] for s in sites))
-        log.debug('recieved users: %s' % ', '.join([key for key in users]))
+        log.debug('recieved sites: %s ', ', '.join(s['_id'] for s in sites))
+        log.debug('recieved users: %s', ', '.join([key for key in users]))
         if response.get('users'):
             for _id, remotes in response['users'].iteritems():
                 db.users.update_one({'_id': _id}, {'$set': {'remotes': remotes}})
@@ -57,10 +57,10 @@ def update(db, api_uri, site_name, site_id, ssl_cert, central_url):
             {'remotes': {'$exists': True}, '_id': {'$nin': users.keys()}},
             {'$unset': {'remotes': ''}},
         )
-        log.info('%3d users with remote data, %3d remotes' % (
+        log.info('%3d users with remote data, %3d remotes' ,
             len([u['_id'] for u in db.users.find({'remotes': {'$exists': True}}, {'_id': True})]),
             len([s['_id'] for s in db.sites.find({}, {'_id': True})])
-        ))
+        )
         return True
     else:
         # r.reason contains generic description for the specific error code
@@ -70,7 +70,7 @@ def update(db, api_uri, site_name, site_id, ssl_cert, central_url):
             msg = reason.group(1)
         else:
             msg = r.reason
-        log.warning('%s - %s' % (r.status_code, msg))
+        log.warning('%s - %s', r.status_code, msg)
         return False
 
 
@@ -139,21 +139,3 @@ class CentralClient(base.RequestHandler):
         self.abort(404, 'register endpoint is not implemented')
 
         # every request to this route is aborted at the moment
-        if not config.get_item('site', 'registered'):
-            self.abort(400, 'Site not registered with central')
-        if not config.get_item('site', 'ssl_cert'):
-            self.abort(400, 'SSL cert not configured')
-        if not config.get_item('site', 'central_url'):
-            self.abort(400, 'Central URL not configured')
-        if not update(db=config.db,
-                      api_uri=config.get_item('site', 'api_url'),
-                      site_name=config.get_item('site', 'name'),
-                      site_id=config.get_item('site', 'id'),
-                      ssl_cert=config.get_item('site', 'ssl_cert'),
-                      central_url=config.get_item('site', 'central_url'),):
-            fail_count += 1
-        else:
-            fail_count = 0
-        if fail_count == 3:
-            log.warning('scitran central unreachable, purging all remotes info')
-            clean_remotes(db=config.db, site_id=config.get_item('site', 'id'))
diff --git a/api/dao/liststorage.py b/api/dao/liststorage.py
index 1bd902d7..c4f0ac74 100644
--- a/api/dao/liststorage.py
+++ b/api/dao/liststorage.py
@@ -270,7 +270,7 @@ class AnalysesStorage(ListStorage):
             files.append(file_)
 
         q = {'analyses._id': analysis['_id']}
-        u = {'$set': {'analyses.$.job': job._id, 'analyses.$.files': files}}
+        u = {'$set': {'analyses.$.job': job.id_, 'analyses.$.files': files}}
         config.db.sessions.update_one(q, u)
 
         analysis['job'] = job
diff --git a/api/handlers/collectionshandler.py b/api/handlers/collectionshandler.py
index 24b52ff8..a740fca4 100644
--- a/api/handlers/collectionshandler.py
+++ b/api/handlers/collectionshandler.py
@@ -6,12 +6,13 @@ from ..auth import containerauth, always_ok
 from ..dao import containerstorage
 from ..dao import APIStorageException
 
-from containerhandler import ContainerHandler
+from .containerhandler import ContainerHandler
 
 log = config.log
 
 
 class CollectionsHandler(ContainerHandler):
+    # pylint: disable=arguments-differ
 
     container_handler_configurations = ContainerHandler.container_handler_configurations
@@ -23,9 +24,13 @@ class CollectionsHandler(ContainerHandler):
         'list_projection': {'metadata': 0}
     }
 
+    def __init__(self, request=None, response=None):
+        super(CollectionsHandler, self).__init__(request, response)
+
+
+
     def post(self, **kwargs):
-        self.config = self.container_handler_configurations['collections']
-        self.storage = self.config['storage']
+        storage = self.container_handler_configurations['collections']['storage']
         mongo_validator, payload_validator = self._get_validators()
         payload = self.request.json_body
@@ -38,7 +43,7 @@ class CollectionsHandler(ContainerHandler):
         }]
         payload['curator'] = self.uid
         payload['created'] = payload['modified'] = datetime.datetime.utcnow()
-        result = mongo_validator(self.storage.exec_op)('POST', payload=payload)
+        result = mongo_validator(storage.exec_op)('POST', payload=payload)
 
         if result.acknowledged:
             return {'_id': result.inserted_id}
@@ -47,8 +52,7 @@ class CollectionsHandler(ContainerHandler):
 
     def put(self, **kwargs):
         _id = kwargs.pop('cid')
-        self.config = self.container_handler_configurations['collections']
-        self.storage = self.config['storage']
+        storage = self.container_handler_configurations['collections']['storage']
         container = self._get_container(_id)
         mongo_validator, payload_validator = self._get_validators()
@@ -58,7 +62,7 @@ class CollectionsHandler(ContainerHandler):
         permchecker = self._get_permchecker(container)
         payload['modified'] = datetime.datetime.utcnow()
         try:
-            result = mongo_validator(permchecker(self.storage.exec_op))('PUT', _id=_id, payload=payload)
+            result = mongo_validator(permchecker(storage.exec_op))('PUT', _id=_id, payload=payload)
         except APIStorageException as e:
             self.abort(400, e.message)
@@ -66,7 +70,7 @@ class CollectionsHandler(ContainerHandler):
             self._add_contents(contents, _id)
             return {'modified': result.modified_count}
         else:
-            self.abort(404, 'Element not updated in collection {} {}'.format(self.storage.cont_name, _id))
+            self.abort(404, 'Element not updated in collection {} {}'.format(storage.cont_name, _id))
 
     def _add_contents(self, contents, _id):
         if not contents:
@@ -95,20 +99,18 @@ class CollectionsHandler(ContainerHandler):
         config.db.acquisitions.update_many({'collections': bson.ObjectId(_id)}, {'$pull': {'collections': bson.ObjectId(_id)}})
 
     def get_all(self, cont_name):
-        self.config = self.container_handler_configurations[cont_name]
-        self.storage = self.config['storage']
-        projection = self.config['list_projection']
+        storage = self.container_handler_configurations['collections']['storage']
+        projection = self.container_handler_configurations['collections']['list_projection']
         if self.superuser_request:
             permchecker = always_ok
         elif self.public_request:
             permchecker = containerauth.list_public_request
         else:
-            admin_only = self.is_true('admin')
-            permchecker = containerauth.list_permission_checker(self, admin_only)
+            permchecker = containerauth.list_permission_checker(self)
         query = {}
-        results = permchecker(self.storage.exec_op)('GET', query=query, public=self.public_request, projection=projection)
+        results = permchecker(storage.exec_op)('GET', query=query, public=self.public_request, projection=projection)
         if results is None:
-            self.abort(404, 'Element not found in collection {}'.format(self.storage.cont_name))
+            self.abort(404, 'Element not found in collection {}'.format(storage.cont_name))
         self._filter_all_permissions(results, self.uid, self.user_site)
         if self.is_true('counts'):
             self._add_results_counts(results)
@@ -136,16 +138,15 @@ class CollectionsHandler(ContainerHandler):
         curator_ids = list(set((c['curator'] for c in self.get_all('collections'))))
         return list(config.db.users.find({'_id': {'$in': curator_ids}}, ['firstname', 'lastname']))
 
-    def get_sessions(self, cont_name, cid):
+    def get_sessions(self, cid):
         """Return the list of sessions in a collection."""
-        # FIXME use storage and permission checking abstractions
-        self.config = self.container_handler_configurations['collections']
-        self.storage = self.config['storage']
+        # TODO use storage and permission checking abstractions
+        storage = self.container_handler_configurations['collections']['storage']
         if not bson.ObjectId.is_valid(cid):
             self.abort(400, 'not a valid object id')
         _id = bson.ObjectId(cid)
-        if not self.storage.dbc.find_one({'_id': _id}):
+        if not storage.dbc.find_one({'_id': _id}):
             self.abort(404, 'no such Collection')
         agg_res = config.db.acquisitions.aggregate([
                 {'$match': {'collections': _id}},
@@ -168,16 +169,15 @@ class CollectionsHandler(ContainerHandler):
             sess['debug']['acquisitions'] = self.uri_for('coll_acq', cont_name='collections', cid=cid, _full=True) + '?session=%s&user=%s' % (sid, self.get_param('user', ''))
         return sessions
 
-    def get_acquisitions(self, cid, **kwargs):
+    def get_acquisitions(self, cid):
         """Return the list of acquisitions in a collection."""
-        # FIXME use storage and permission checking abstractions
-        self.config = self.container_handler_configurations['collections']
-        self.storage = self.config['storage']
+        # TODO use storage and permission checking abstractions
+        storage = self.container_handler_configurations['collections']['storage']
         if not bson.ObjectId.is_valid(cid):
             self.abort(400, 'not a valid object id')
         _id = bson.ObjectId(cid)
-        if not self.storage.dbc.find_one({'_id': _id}):
+        if not storage.dbc.find_one({'_id': _id}):
             self.abort(404, 'no such Collection')
         query = {'collections': _id}
         sid = self.get_param('session', '')
diff --git a/api/handlers/grouphandler.py b/api/handlers/grouphandler.py
index 6d1e0785..07f56893 100644
--- a/api/handlers/grouphandler.py
+++ b/api/handlers/grouphandler.py
@@ -5,7 +5,7 @@ from .. import util
 from .. import config
 from .. import debuginfo
 from .. import validators
-from ..auth import groupauth, always_ok
+from ..auth import groupauth
 from ..dao import containerstorage
 
 log = config.log
@@ -15,9 +15,9 @@ class GroupHandler(base.RequestHandler):
 
     def __init__(self, request=None, response=None):
         super(GroupHandler, self).__init__(request, response)
+        self.storage = containerstorage.GroupStorage('groups', use_object_id=False)
 
     def get(self, _id):
-        self._init_storage()
         group = self._get_group(_id)
         if not group:
             self.abort(404, 'no such Group: ' + _id)
@@ -30,7 +30,6 @@ class GroupHandler(base.RequestHandler):
     def delete(self, _id):
         if _id == 'unknown':
             self.abort(400, 'The group "unknown" can\'t be deleted as it is integral within the API')
-        self._init_storage()
         group = self._get_group(_id)
         if not group:
             self.abort(404, 'no such Group: ' + _id)
@@ -43,8 +42,6 @@ class GroupHandler(base.RequestHandler):
         return result
 
     def get_all(self, uid=None):
-        self._init_storage()
-        query = None
         projection = {'name': 1, 'created': 1, 'modified': 1, 'roles': [], 'tags': []}
         permchecker = groupauth.list_permission_checker(self, uid)
         results = permchecker(self.storage.exec_op)('GET', projection=projection)
@@ -57,7 +54,6 @@ class GroupHandler(base.RequestHandler):
         return results
 
     def put(self, _id):
-        self._init_storage()
         group = self._get_group(_id)
         if not group:
             self.abort(404, 'no such Group: ' + _id)
@@ -75,7 +71,6 @@ class GroupHandler(base.RequestHandler):
         self.abort(404, 'Group {} not updated'.format(_id))
 
     def post(self):
-        self._init_storage()
         permchecker = groupauth.default(self, None)
         payload = self.request.json_body
         mongo_schema_uri = validators.schema_uri('mongo', 'group.json')
@@ -95,9 +90,6 @@ class GroupHandler(base.RequestHandler):
         else:
             self.abort(404, 'Group {} not updated'.format(payload['_id']))
 
-    def _init_storage(self):
-        self.storage = containerstorage.GroupStorage('groups', use_object_id=False)
-
     def _get_group(self, _id):
         group = self.storage.get_container(_id)
         if group is not None:
diff --git a/api/handlers/listhandler.py b/api/handlers/listhandler.py
index 1b0f0ae0..962bf2cc 100644
--- a/api/handlers/listhandler.py
+++ b/api/handlers/listhandler.py
@@ -9,10 +9,7 @@ import zipfile
 
 from .. import base
 from .. import config
-from .. import files
-from ..jobs import rules
 from ..jobs.jobs import Job
-from .. import tempdir as tempfile
 from .. import upload
 from .. import download
 from .. import util
@@ -73,7 +70,7 @@ def initialize_list_configurations():
             'input_schema_file': 'analysis.json'
         }
     }
-    list_handler_configurations = {
+    list_container_configurations = {
         'groups': {
             'roles':{
                 'storage': liststorage.ListStorage,
@@ -97,7 +94,7 @@ def initialize_list_configurations():
         'collections': copy.deepcopy(container_default_configurations)
     }
     # preload the Storage instances for all configurations
-    for cont_name, cont_config in list_handler_configurations.iteritems():
+    for cont_name, cont_config in list_container_configurations.iteritems():
         for list_name, list_config in cont_config.iteritems():
             storage_class = list_config['storage']
             storage = storage_class(
@@ -106,7 +103,7 @@ def initialize_list_configurations():
                 use_object_id=list_config.get('use_object_id', False)
             )
             list_config['storage'] = storage
-    return list_handler_configurations
+    return list_container_configurations
 
 list_handler_configurations = initialize_list_configurations()
 
@@ -130,7 +127,7 @@ class ListHandler(base.RequestHandler):
 
     def get(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
-        container, permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
+        permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
         try:
             result = keycheck(permchecker(storage.exec_op))('GET', _id, query_params=kwargs)
         except APIStorageException as e:
@@ -142,7 +139,7 @@ class ListHandler(base.RequestHandler):
 
     def post(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
-        container, permchecker, storage, mongo_validator, payload_validator, keycheck = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, mongo_validator, payload_validator, keycheck = self._initialize_request(cont_name, list_name, _id)
 
         payload = self.request.json_body
         payload_validator(payload, 'POST')
@@ -155,7 +152,7 @@ class ListHandler(base.RequestHandler):
 
     def put(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
-        container, permchecker, storage, mongo_validator, payload_validator, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
+        permchecker, storage, mongo_validator, payload_validator, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
 
         payload = self.request.json_body
         payload_validator(payload, 'PUT')
@@ -171,7 +168,7 @@ class ListHandler(base.RequestHandler):
 
     def delete(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
-        container, permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
+        permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
         try:
             result = keycheck(permchecker(storage.exec_op))('DELETE', _id, query_params=kwargs)
         except APIStorageException as e:
@@ -191,11 +188,11 @@ class ListHandler(base.RequestHandler):
         5) the mongo_validator that will check what will be sent to mongo against a json schema
         6) the keycheck decorator validating the request key
         """
-        config = list_handler_configurations[cont_name][list_name]
-        storage = config['storage']
-        permchecker = config['permchecker']
-        if config.get('get_full_container'):
-            query_params = None
+        conf = list_handler_configurations[cont_name][list_name]
+        storage = conf['storage']
+        permchecker = conf['permchecker']
+        if conf.get('get_full_container'):
+            query_params = None
         container = storage.get_container(_id, query_params)
         if container is not None:
             if self.superuser_request:
@@ -206,12 +203,12 @@ class ListHandler(base.RequestHandler):
                 permchecker = permchecker(self, container)
         else:
             self.abort(404, 'Element {} not found in container {}'.format(_id, storage.cont_name))
-        mongo_schema_uri = validators.schema_uri('mongo', config.get('storage_schema_file'))
+        mongo_schema_uri = validators.schema_uri('mongo', conf.get('storage_schema_file'))
         mongo_validator = validators.decorator_from_schema_path(mongo_schema_uri)
-        input_schema_uri = validators.schema_uri('input', config.get('input_schema_file'))
+        input_schema_uri = validators.schema_uri('input', conf.get('input_schema_file'))
         input_validator = validators.from_schema_path(input_schema_uri)
         keycheck = validators.key_check(mongo_schema_uri)
-        return container, permchecker, storage, mongo_validator, input_validator, keycheck
+        return permchecker, storage, mongo_validator, input_validator, keycheck
 
 
 class PermissionsListHandler(ListHandler):
@@ -248,7 +245,7 @@ class PermissionsListHandler(ListHandler):
                 'permissions': config.db.projects.find_one({'_id': oid},{'permissions': 1})['permissions']
             }}
             hierarchy.propagate_changes(cont_name, oid, {}, update)
-        except:
+        except APIStorageException:
            self.abort(500, 'permissions not propagated from project {} to sessions'.format(_id))
 
 
@@ -260,7 +257,7 @@ class NotesListHandler(ListHandler):
 
     def post(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
-        container, permchecker, storage, mongo_validator, input_validator, keycheck = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, mongo_validator, input_validator, keycheck = self._initialize_request(cont_name, list_name, _id)
 
         payload = self.request.json_body
         input_validator(payload, 'POST')
@@ -278,7 +275,7 @@ class NotesListHandler(ListHandler):
 
     def put(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
-        container, permchecker, storage, mongo_validator, input_validator, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
+        permchecker, storage, mongo_validator, input_validator, keycheck = self._initialize_request(cont_name, list_name, _id, query_params=kwargs)
 
         payload = self.request.json_body
         input_validator(payload, 'PUT')
@@ -327,7 +324,7 @@ class TagsListHandler(ListHandler):
         """
         try:
             hierarchy.propagate_changes(cont_name, _id, query, update)
-        except:
+        except APIStorageException:
            self.abort(500, 'tag change not propagated from group {}'.format(_id))
 
 
@@ -406,7 +403,7 @@ class FileListHandler(ListHandler):
         """
 
         _id = kwargs.pop('cid')
-        container, permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, _, _, keycheck = self._initialize_request(cont_name, list_name, _id)
         list_name = storage.list_name
         filename = kwargs.get('name')
 
@@ -464,12 +461,6 @@ class FileListHandler(ListHandler):
             self.response.headers['Content-Type'] = 'application/octet-stream'
             self.response.headers['Content-Disposition'] = 'attachment; filename="' + filename + '"'
 
-    def delete(self, cont_name, list_name, **kwargs):
-        filename = kwargs.get('name')
-        _id = kwargs.get('cid')
-        result = super(FileListHandler, self).delete(cont_name, list_name, **kwargs)
-        return result
-
     def post(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
 
@@ -481,10 +472,10 @@ class FileListHandler(ListHandler):
         cont_name_plural = cont_name + 's'
 
         # Authorize
-        container, permchecker, storage, mongo_validator, payload_validator, keycheck = self._initialize_request(cont_name_plural, list_name, _id)
+        permchecker, _, _, _, _ = self._initialize_request(cont_name_plural, list_name, _id)
         permchecker(noop)('POST', _id=_id)
 
-        return upload.process_upload(self.request, upload.Strategy.targeted, container_type=cont_name, id=_id, origin=self.origin)
+        return upload.process_upload(self.request, upload.Strategy.targeted, container_type=cont_name, id_=_id, origin=self.origin)
 
     def _check_packfile_token(self, project_id, token_id, check_user=True):
         """
@@ -563,7 +554,7 @@ class FileListHandler(ListHandler):
             'token': str(result.inserted_id)
         }
 
-    def packfile(self, cont_name, **kwargs):
+    def packfile(self, **kwargs):
         """
         Add files to an in-progress packfile.
         """
@@ -574,7 +565,7 @@ class FileListHandler(ListHandler):
 
         return upload.process_upload(self.request, upload.Strategy.token, origin=self.origin, context={'token': token_id})
 
-    def packfile_end(self, cont_name, **kwargs):
+    def packfile_end(self, **kwargs):
         """
         Complete and save an uploaded packfile.
         """
@@ -678,7 +669,7 @@ class AnalysesHandler(ListHandler):
         """
 
         _id = kwargs.pop('cid')
-        container, permchecker, storage, mongo_validator, _, keycheck = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, mongo_validator, _, keycheck = self._initialize_request(cont_name, list_name, _id)
         permchecker(noop)('POST', _id=_id)
 
         if self.is_true('job'):
@@ -840,7 +831,7 @@ class AnalysesHandler(ListHandler):
         """
 
         _id = kwargs.pop('cid')
-        container, permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
         filename = kwargs.get('name')
         ticket_id = self.get_param('ticket')
         if not ticket_id:
@@ -910,7 +901,7 @@ class AnalysesHandler(ListHandler):
     def delete_note(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
         analysis_id = kwargs.pop('_id')
-        container, permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, _, _, _ = self._initialize_request(cont_name, list_name, _id)
         note_id = kwargs.get('note_id')
         permchecker(noop)('DELETE', _id=_id)
         result = storage.delete_note(_id=_id, analysis_id=analysis_id, note_id=note_id)
@@ -922,7 +913,7 @@ class AnalysesHandler(ListHandler):
     def add_note(self, cont_name, list_name, **kwargs):
         _id = kwargs.pop('cid')
         analysis_id = kwargs.get('_id')
-        container, permchecker, storage, mongo_validator, input_validator, keycheck = self._initialize_request(cont_name, list_name, _id)
+        permchecker, storage, _, input_validator, _ = self._initialize_request(cont_name, list_name, _id)
         payload = self.request.json_body
         input_validator(payload, 'POST')
         payload['_id'] = str(bson.objectid.ObjectId())
diff --git a/api/handlers/reporthandler.py b/api/handlers/reporthandler.py
index 52347e51..6855d13e 100644
--- a/api/handlers/reporthandler.py
+++ b/api/handlers/reporthandler.py
@@ -1,11 +1,9 @@
-import json
 import bson
 import dateutil
 import copy
 
 from .. import base
 from .. import config
-from .. import util
 
 log = config.log
 
@@ -19,7 +17,7 @@ class ReportHandler(base.RequestHandler):
     def __init__(self, request=None, response=None):
         super(ReportHandler, self).__init__(request, response)
 
-    def get(self, report_type, **kwargs):
+    def get(self, report_type):
         report = None
         if report_type == 'site':
             report = SiteReport()
@@ -38,7 +36,7 @@ class ReportHandler(base.RequestHandler):
             if end_date is not None and start_date is not None and end_date < start_date:
                 self.abort(400, 'End date {} is before start date {}'.format(end_date, start_date))
 
-            report = ProjectReport(map(bson.ObjectId, project_list),
+            report = ProjectReport([bson.ObjectId(id_) for id_ in project_list],
                                    start_date=start_date,
                                    end_date=end_date)
 
@@ -193,7 +191,7 @@ class ProjectReport(Report):
                 if perm.get('access') == 'admin':
                     admins.append(perm.get('_id'))
             admin_objs = config.db.users.find({'_id': {'$in': admins}})
-            project['admins'] = map(lambda x: x.get('firstname','')+' '+x.get('lastname',''), admin_objs)
+            project['admins'] = map(lambda x: x.get('firstname','')+' '+x.get('lastname',''), admin_objs) # pylint: disable=bad-builtin, deprecated-lambda
 
             base_query = self._base_query(p['_id'])
             project['session_count'] = config.db.sessions.count(base_query)
diff --git a/api/handlers/schemahandler.py b/api/handlers/schemahandler.py
index 18eb4a00..f672e38b 100644
--- a/api/handlers/schemahandler.py
+++ b/api/handlers/schemahandler.py
@@ -1,6 +1,5 @@
 import os
 import json
-import datetime
 
 from .. import base
 from .. import config
@@ -12,7 +11,7 @@ class SchemaHandler(base.RequestHandler):
     def __init__(self, request=None, response=None):
         super(SchemaHandler, self).__init__(request, response)
 
-    def get(self, schema, **kwargs):
+    def get(self, schema):
         schema_path = os.path.join(config.get_item('persistent', 'schema_path'), schema)
         try:
             with open(schema_path, 'ru') as f:
diff --git a/api/handlers/searchhandler.py b/api/handlers/searchhandler.py
index eca3f677..891e04f2 100644
--- a/api/handlers/searchhandler.py
+++ b/api/handlers/searchhandler.py
@@ -67,8 +67,9 @@ class SearchHandler(base.RequestHandler):
 
     def __init__(self, request=None, response=None):
         super(SearchHandler, self).__init__(request, response)
+        self.search_containers = None
 
-    def advanced_search(self, **kwargs):
+    def advanced_search(self):
         if self.public_request:
             self.abort(403, 'search is available only for authenticated users')
         queries = self.request.json_body
@@ -113,7 +114,7 @@ class SearchHandler(base.RequestHandler):
                 parents.update(self._get_parents(parent_container, parent_name))
         return parents
 
-    def get_datatree(self, **kwargs):
+    def get_datatree(self):
         if self.public_request:
             self.abort(403, 'search is available only for authenticated users')
         size = self.get_param('size')
diff --git a/api/handlers/userhandler.py b/api/handlers/userhandler.py
index 1ef7f4db..73fe2e72 100644
--- a/api/handlers/userhandler.py
+++ b/api/handlers/userhandler.py
@@ -7,9 +7,9 @@ from .. import base
 from .. import util
 from .. import config
 from .. import validators
-from ..auth import userauth, always_ok, ROLES
+from ..auth import userauth
 from ..dao import containerstorage
-from ..dao import noop
+from ..dao import noop, APIStorageException
 
 log = config.log
 
@@ -18,9 +18,9 @@ class UserHandler(base.RequestHandler):
 
     def __init__(self, request=None, response=None):
         super(UserHandler, self).__init__(request, response)
+        self.storage = containerstorage.ContainerStorage('users', use_object_id=False)
 
     def get(self, _id):
-        self._init_storage()
         user = self._get_user(_id)
         permchecker = userauth.default(self, user)
         result = permchecker(self.storage.exec_op)('GET', _id, projection={'api_key': 0} or None)
@@ -30,7 +30,6 @@ class UserHandler(base.RequestHandler):
 
     def self(self):
         """Return details for the current User."""
-        self._init_storage()
         if not self.uid:
             self.abort(400, 'no user is logged in')
         user = self.storage.exec_op('GET', self.uid)
@@ -47,7 +46,6 @@ class UserHandler(base.RequestHandler):
         return result
 
     def delete(self, _id):
-        self._init_storage()
         user = self._get_user(_id)
         permchecker = userauth.default(self, user)
         # Check for authZ before cleaning up user permissions
@@ -94,8 +92,6 @@ class UserHandler(base.RequestHandler):
 
             {"modified": 1}
         """
-
-        self._init_storage()
         user = self._get_user(_id)
         permchecker = userauth.default(self, user)
         payload = self.request.json_body
@@ -159,9 +155,6 @@ class UserHandler(base.RequestHandler):
         else:
             self.abort(404, 'User {} not updated'.format(payload['_id']))
 
-    def _init_storage(self):
-        self.storage = containerstorage.ContainerStorage('users', use_object_id=False)
-
     def _cleanup_user_permissions(self, uid):
         try:
             config.db.collections.delete_many({'curator': uid})
@@ -172,17 +165,15 @@ class UserHandler(base.RequestHandler):
             config.db.projects.update_many(query, update)
             config.db.sessions.update_many(query, update)
             config.db.acquisitions.update_many(query, update)
-        except:
+        except APIStorageException:
             self.abort(500, 'Site-wide user permissions for {} were unabled to be removed'.format(uid))
 
     def avatar(self, uid):
-        self._init_storage()
         self.resolve_avatar(uid, default=self.request.GET.get('default'))
 
     def self_avatar(self):
         if self.uid is None:
             self.abort(404, 'not a logged-in user')
-        self._init_storage()
         self.resolve_avatar(self.uid, default=self.request.GET.get('default'))
 
     def resolve_avatar(self, email, default=None):
@@ -194,11 +185,10 @@ class UserHandler(base.RequestHandler):
         # Storage throws a 404; we want to catch that and handle it separately in the case of a provided default.
         try:
             user = self._get_user(email)
-        except:
+        except APIStorageException:
             user = {}
 
         avatar = user.get('avatar', None)
-        avatars = user.get('avatars', {})
 
         # If the user exists but has no set avatar, try to get one
         if user and avatar is None:
diff --git a/api/jobs/jobs.py b/api/jobs/jobs.py
index 0f320d28..dc5a3b4c 100644
--- a/api/jobs/jobs.py
+++ b/api/jobs/jobs.py
@@ -97,7 +97,7 @@ class Job(object):
 
         d['_id'] = str(d['_id'])
 
-        return cls(d['name'], d.get('inputs', None), destination=d.get('destination', None), tags=d['tags'], attempt=d['attempt'], previous_job_id=d.get('previous_job_id', None), created=d['created'], modified=d['modified'], state=d['state'], request=d.get('request', None), _id=d['_id'])
+        return cls(d['name'], d.get('inputs', None), destination=d.get('destination', None), tags=d['tags'], attempt=d['attempt'], previous_job_id=d.get('previous_job_id', None), created=d['created'], modified=d['modified'], state=d['state'], request=d.get('request', None), id_=d['_id'])
 
     @classmethod
     def get(cls, _id):
diff --git a/api/jobs/queue.py b/api/jobs/queue.py
index 91179067..fccb3b65 100644
--- a/api/jobs/queue.py
+++ b/api/jobs/queue.py
@@ -83,7 +83,7 @@ class Queue(object):
         """
 
         if job.attempt >= max_attempts() and not force:
-            log.info('Permanently failed job %s (after %d attempts)' % (job.id_, job.attempt))
+            log.info('Permanently failed job %s (after %d attempts)', job.id_, job.attempt)
             return
 
         if job.state != 'failed':
@@ -109,7 +109,7 @@ class Queue(object):
         new_job.modified = now
 
         new_id = new_job.insert()
-        log.info('respawned job %s as %s (attempt %d)' % (job.id_, new_id, new_job.attempt))
+        log.info('respawned job %s as %s (attempt %d)', job.id_, new_id, new_job.attempt)
 
         return new_id
 
diff --git a/api/placer.py b/api/placer.py
index 5e894500..b1c927c8 100644
--- a/api/placer.py
+++ b/api/placer.py
@@ -161,9 +161,9 @@ class UIDPlacer(Placer):
             container = target['container']
             r_metadata = target['metadata']
 
-            self.container_type = container.level
-            self.id = container._id
-            self.container = container.container
+            self.container_type = container['level']
+            self.id = container['_id']
+            self.container = container['container']
 
             info.update(r_metadata)
diff --git a/api/search/queryprocessor.py b/api/search/queryprocessor.py
index bf76bb0b..82896896 100644
--- a/api/search/queryprocessor.py
+++ b/api/search/queryprocessor.py
@@ -148,9 +148,8 @@ class TargetProperty(object):
 class TargetInAnalysis(TargetProperty):
 
     def __init__(self, name, query, analyses_query):
+        super(TargetInAnalysis, self).__init__(name, query)
         self.target_analysys = TargetProperty('analyses', analyses_query)
-        self.name = name
-        self.query = query
 
     def get_results(self, parent_name, parent_results):
         analysis_list = self.target_analysys.get_results(parent_name, parent_results)
-- 
GitLab