diff --git a/api/dao/containerstorage.py b/api/dao/containerstorage.py
index de279038563f3223c6cacd1ec24814e6001562d1..5c221b17589bd1e41d7cdf89d1b1af6de3807433 100644
--- a/api/dao/containerstorage.py
+++ b/api/dao/containerstorage.py
@@ -187,8 +187,7 @@ class SessionStorage(ContainerStorage):
                 return True
         return False
 
-    def get_all_for_targets(self, target_type, target_ids,
-                            user=None, projection=None, include_archived=True):
+    def get_all_for_targets(self, target_type, target_ids, user=None, projection=None):
         """
         Given a container type and list of ids, get all sessions that are in those hierarchies.
 
@@ -199,13 +198,9 @@ class SessionStorage(ContainerStorage):
 
         If user is supplied, will only return sessions with user in its perms list.
         If projection is supplied, it will be applied to the session query.
-        If inlude_archived is false, it will ignore archived sessions.
         """
         query = {}
-        if not include_archived:
-            query['archived'] = {'$ne': True}
-
         target_type = containerutil.singularize(target_type)
 
         if target_type == 'project':
@@ -253,8 +248,7 @@ class AcquisitionStorage(ContainerStorage):
             SessionStorage().recalc_session_compliance(acquisition['session'])
         return result
 
-    def get_all_for_targets(self, target_type, target_ids,
-                            user=None, projection=None, collection_id=None, include_archived=True):
+    def get_all_for_targets(self, target_type, target_ids, user=None, projection=None, collection_id=None):
        """
         Given a container type and list of ids, get all acquisitions that are in those hierarchies.
 
@@ -266,13 +260,9 @@ class AcquisitionStorage(ContainerStorage):
         If user is supplied, will only return acquisitions with user in its perms list.
         If projection is supplied, it will be applied to the acquisition query.
         If colllection is supplied, the collection context will be used to query acquisitions.
-        If inlude_archived is false, it will ignore archived acquisitions.
-
-        if target_type is 'project', it will ignore sessions in the project that are archived
         """
         query = {}
-        if not include_archived:
-            query['archived'] = {'$ne': True}
 
         # If target_type is 'acquisitions', it just wraps self.get_all_el with a query containing
         # all acquisition ids.
@@ -340,9 +330,8 @@ class AnalysisStorage(ContainerStorage):
         }
         for key in defaults:
             analysis.setdefault(key, defaults[key])
-        for key in ('public', 'archived'):
-            if key in parent:
-                analysis.setdefault(key, parent[key])
+        if 'public' in parent:
+            analysis.setdefault('public', parent['public'])
 
 
     def create_job_and_analysis(self, cont_name, cid, analysis, job, origin, uid):
diff --git a/api/dao/containerutil.py b/api/dao/containerutil.py
index bc748a0c5f737e5ab93714cbaeec9b14689cff20..1b9c4123f35f565311c28504cc419363620bed9d 100644
--- a/api/dao/containerutil.py
+++ b/api/dao/containerutil.py
@@ -92,9 +92,9 @@ def get_stats(cont, cont_type):
     # Get session and non-compliant session count
     match_q = {}
     if cont_type == 'projects':
-        match_q = {'project': cont['_id'], 'archived': {'$in': [None, False]}, 'deleted': {'$exists': False}}
+        match_q = {'project': cont['_id'], 'deleted': {'$exists': False}}
     elif cont_type == 'collections':
-        result = config.db.acquisitions.find({'collections': cont['_id'], 'archived': {'$in': [None, False]}, 'deleted': {'$exists': False}}, {'session': 1})
+        result = config.db.acquisitions.find({'collections': cont['_id'], 'deleted': {'$exists': False}}, {'session': 1})
         session_ids = list(set([s['session'] for s in result]))
         match_q = {'_id': {'$in': session_ids}}
 
diff --git a/api/dao/hierarchy.py b/api/dao/hierarchy.py
index 46837d336ef1170c22f06f4fcce6ea295a18d978..e3bce623acefd7ffb00fd0b0e96195cc914138f4 100644
--- a/api/dao/hierarchy.py
+++ b/api/dao/hierarchy.py
@@ -206,7 +206,7 @@ def is_session_compliant(session, template):
     if not session.get('_id'):
         # New session, won't have any acquisitions. Compliance check fails
         return False
-    acquisitions = list(config.db.acquisitions.find({'session': session['_id'], 'archived': {'$ne': True}, 'deleted': {'$exists': False}}))
+    acquisitions = list(config.db.acquisitions.find({'session': session['_id'], 'deleted': {'$exists': False}}))
     for req in a_requirements:
         req_temp = copy.deepcopy(req)
         min_count = req_temp.pop('minimum')
diff --git a/api/handlers/collectionshandler.py b/api/handlers/collectionshandler.py
index 5a8dd473d07a753a43a6a1c43c3e3ad849fe2ea6..703b277cc04b2e9c3946d271d8f7c64a4f1bcabb 100644
--- a/api/handlers/collectionshandler.py
+++ b/api/handlers/collectionshandler.py
@@ -160,9 +160,6 @@ class CollectionsHandler(ContainerHandler):
         ])
         query = {'_id': {'$in': [ar['_id'] for ar in agg_res]}}
-
-        if not self.is_true('archived'):
-            query['archived'] = {'$ne': True}
 
         if not self.superuser_request:
             query['permissions._id'] = self.uid
 
@@ -193,9 +190,6 @@ class CollectionsHandler(ContainerHandler):
         elif sid != '':
             self.abort(400, sid + ' is not a valid ObjectId')
 
-        if not self.is_true('archived'):
-            query['archived'] = {'$ne': True}
-
         if not self.superuser_request:
             query['permissions._id'] = self.uid
 
diff --git a/api/handlers/containerhandler.py b/api/handlers/containerhandler.py
index 7d420b2aa151600c719e5e28210ca769faf98b4f..373016383ad960049dc283c05a9ff38d17a990c0 100644
--- a/api/handlers/containerhandler.py
+++ b/api/handlers/containerhandler.py
@@ -58,7 +58,7 @@ class ContainerHandler(base.RequestHandler):
             'storage_schema_file': 'project.json',
             'payload_schema_file': 'project.json',
             'list_projection': {'info': 0, 'files.info': 0},
-            'propagated_properties': ['archived', 'public'],
+            'propagated_properties': ['public'],
             'children_cont': 'sessions'
         },
         'sessions': {
@@ -72,7 +72,6 @@ class ContainerHandler(base.RequestHandler):
                                 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, 'subject.race': 0,
                                 'subject.ethnicity': 0, 'subject.info': 0, 'files.info': 0, 'tags': 0},
-            'propagated_properties': ['archived'],
             'children_cont': 'acquisitions'
         },
         'acquisitions': {
@@ -337,8 +336,6 @@ class ContainerHandler(base.RequestHandler):
             query = {par_cont_name[:-1]: par_id}
         else:
             query = {}
-        if not self.is_true('archived'):
-            query['archived'] = {'$ne': True}
         # this request executes the actual reqeust filtering containers based on the user permissions
         results = permchecker(self.storage.exec_op)('GET', query=query, public=self.public_request, projection=projection)
         if results is None:
diff --git a/api/handlers/dataexplorerhandler.py b/api/handlers/dataexplorerhandler.py
index c2c8aeeb372477eefe2e61255c4fd1eb02734462..a6568ca05a6bd56b3069499008994a5bd0ee056b 100644
--- a/api/handlers/dataexplorerhandler.py
+++ b/api/handlers/dataexplorerhandler.py
@@ -178,13 +178,6 @@ FACET_QUERY = {
                 "stats" : { "field" : "session.timestamp"}
             },
-            "session.archived" : {
-                "terms" : {
-                    "field" : "session.archived.raw",
-                    "size" : 2,
-                    "missing": "false"
-                }
-            },
         }
     },
     "session_age": {
@@ -257,13 +250,11 @@ SOURCE_COLLECTION = [
 
 SOURCE_PROJECT = SOURCE_COMMON + [
     "project._id",
-    "project.archived",
     "project.label",
 ]
 
 SOURCE_SESSION = SOURCE_PROJECT + [
     "session._id",
-    "session.archived",
     "session.created",
     "session.label",
     "session.timestamp",
@@ -272,7 +263,6 @@ SOURCE_SESSION = SOURCE_PROJECT + [
 
 SOURCE_ACQUISITION = SOURCE_SESSION + [
     "acquisition._id",
-    "acquisition.archived",
     "acquisition.created",
     "acquisition.label",
     "acquisition.timestamp",
diff --git a/api/jobs/handlers.py b/api/jobs/handlers.py
index 9e874e11719ff71b4fa15a921dd834f22e59bbfa..f4adde6dcd9a0d3c4d8b120092dde1de21e7ebdb 100644
--- a/api/jobs/handlers.py
+++ b/api/jobs/handlers.py
@@ -607,12 +607,11 @@ class BatchHandler(base.RequestHandler):
 
         if not file_inputs:
             # Grab sessions rather than acquisitions
-            containers = SessionStorage().get_all_for_targets(container_type, objectIds, include_archived=False)
+            containers = SessionStorage().get_all_for_targets(container_type, objectIds)
 
         else:
             # Get acquisitions associated with targets
-            containers = AcquisitionStorage().get_all_for_targets(container_type, objectIds,
-                                                                  collection_id=collection_id, include_archived=False)
+            containers = AcquisitionStorage().get_all_for_targets(container_type, objectIds, collection_id=collection_id)
 
         if not containers:
             self.abort(404, 'Could not find necessary containers from targets.')
diff --git a/bin/database.py b/bin/database.py
index 36a7580e7665d26738219707985a278f87463d3d..0dbd90ba130e86675f9284c76712264c9ec1b068 100755
--- a/bin/database.py
+++ b/bin/database.py
@@ -22,7 +22,7 @@ from api.jobs import gears
 from api.types import Origin
 from api.jobs import batch
 
-CURRENT_DATABASE_VERSION = 41 # An int that is bumped when a new schema change is made
+CURRENT_DATABASE_VERSION = 42 # An int that is bumped when a new schema change is made
 
 
 def get_db_version():
 
@@ -1342,6 +1342,24 @@ def upgrade_to_41():
         process_cursor(cursor, upgrade_to_41_closure, context=cont_name)
 
 
+def upgrade_to_42_closure(cont, cont_name):
+    archived = cont.pop('archived')
+    if archived:
+        cont['tags'] = cont.get('tags', []) + ['hidden']
+    config.db[cont_name].update_one({'_id': cont['_id']}, {
+        '$set': {'tags': cont['tags']},
+        '$unset': {'archived': True}
+    })
+    return True
+
+def upgrade_to_42():
+    """
+    Change container flag "archived" to container tag "hidden"
+    """
+    for cont_name in ['groups', 'projects', 'sessions', 'acquisitions']:
+        cursor = config.db[cont_name].find({'archived': {'$exists': True}})
+        process_cursor(cursor, upgrade_to_42_closure, context=cont_name)
+
 
 ###
 ### BEGIN RESERVED UPGRADE SECTION
diff --git a/tests/integration_tests/python/test_containers.py b/tests/integration_tests/python/test_containers.py
index 66aa1e1f9a2b695d2630ff8441e047c126343535..8a5437348012a3f2f166549e18b3049b64056537 100644
--- a/tests/integration_tests/python/test_containers.py
+++ b/tests/integration_tests/python/test_containers.py
@@ -134,13 +134,6 @@ def test_project_template(data_builder, file_form, as_admin):
     assert 'satisfies_template' not in r.json()
     assert as_admin.put('/sessions/' + session, json={'project': project})
 
-    # archived acqusitions don't affect session compliance
-    assert satisfies_template()
-    # Hide Acq_2 so that no acquisition in the session are compliant
-    assert as_admin.put('/acquisitions/' + acquisition_2, json={'archived': True}).ok
-    assert not satisfies_template()
-    assert as_admin.put('/acquisitions/' + acquisition_2, json={'archived': False})
-
     # acquisitions.label
     assert satisfies_template()
     assert as_admin.put('/acquisitions/' + acquisition_2, json={'label': 'non-compliant'}).ok
diff --git a/tests/integration_tests/python/test_propagation.py b/tests/integration_tests/python/test_propagation.py
index 81c8cb58002a70e1a587e8eb71a6df4f2fe0ea30..2dd69a9a4545cb95b50291a577dec4fd615bb04c 100644
--- a/tests/integration_tests/python/test_propagation.py
+++ b/tests/integration_tests/python/test_propagation.py
@@ -1,29 +1,4 @@
 # Test changing propagated properties
-def test_archived_propagation_from_project(data_builder, as_admin):
-    """
-    Tests:
-      - 'archived' is a propagated property
-      - propagation works from a project level
-      - setting a propagated property triggers propagation
-      - set logic for setting 1 of the propagated properties
-    """
-    project = data_builder.create_project()
-    session = data_builder.create_session()
-    acquisition = data_builder.create_acquisition()
-
-    payload = {'archived': True}
-    r = as_admin.put('/projects/' + project, json=payload)
-    assert r.ok
-
-    r = as_admin.get('/projects/' + project)
-    assert r.ok and r.json()['archived']
-
-    r = as_admin.get('/sessions/' + session)
-    assert r.ok and r.json()['archived']
-
-    r = as_admin.get('/acquisitions/' + acquisition)
-    assert r.ok and r.json()['archived']
-
 
 def test_public_propagation_from_project(data_builder, as_admin):
     """
@@ -48,32 +23,6 @@
     assert r.ok and not r.json()['public']
 
 
-def test_public_and_archived_propagation_from_project(data_builder, as_admin):
-    """
-    Tests:
-      - set logic for setting all of the propagated properties
-    """
-    project = data_builder.create_project()
-    session = data_builder.create_session()
-    acquisition = data_builder.create_acquisition()
-
-    payload = {'public': False, 'archived': False}
-    r = as_admin.put('/projects/' + project, json=payload)
-    assert r.ok
-
-    r = as_admin.get('/projects/' + project)
-    content = r.json()
-    assert r.ok and not content['public'] and not content['archived']
-
-    r = as_admin.get('/sessions/' + session)
-    content = r.json()
-    assert r.ok and not content['public'] and not content['archived']
-
-    r = as_admin.get('/acquisitions/' + acquisition)
-    content = r.json()
-    assert r.ok and not content['public'] and not content['archived']
-
-
 def test_public_propagation_from_session(data_builder, as_admin):
     """
     Tests:
@@ -82,15 +31,15 @@
     session = data_builder.create_session()
     acquisition = data_builder.create_acquisition()
 
-    payload = {'archived': True}
+    payload = {'public': True}
     r = as_admin.put('/sessions/' + session, json=payload)
     assert r.ok
 
     r = as_admin.get('/sessions/' + session)
-    assert r.ok and r.json()['archived']
+    assert r.ok and r.json()['public']
 
     r = as_admin.get('/acquisitions/' + acquisition)
-    assert r.ok and r.json()['archived']
+    assert r.ok and r.json()['public']
 
 
 def test_set_public_acquisition(data_builder, as_admin):
@@ -100,7 +49,7 @@
     """
     acquisition = data_builder.create_acquisition()
 
-    payload = {'archived': True}
+    payload = {'public': True}
     r = as_admin.put('/acquisitions/' + acquisition, json=payload)
     assert r.ok
diff --git a/tests/integration_tests/python/test_upgrades.py b/tests/integration_tests/python/test_upgrades.py
new file mode 100644
index 0000000000000000000000000000000000000000..748986e589079a5d11b8bd181f899d14553b0e94
--- /dev/null
+++ b/tests/integration_tests/python/test_upgrades.py
@@ -0,0 +1,28 @@
+import os
+import sys
+
+import bson
+import pytest
+
+
+@pytest.fixture(scope='function')
+def database(mocker):
+    bin_path = os.path.join(os.getcwd(), 'bin')
+    mocker.patch('sys.path', [bin_path] + sys.path)
+    import database
+    return database
+
+
+def test_42(data_builder, api_db, as_admin, database):
+    # Mimic old-style archived flag
+    session = data_builder.create_session()
+    api_db.sessions.update_one({'_id': bson.ObjectId(session)}, {'$set': {'archived': True}})
+
+    # Verify archived session is not hidden anymore
+    assert session in [s['_id'] for s in as_admin.get('/sessions').json()]
+
+    # Verify upgrade creates new-style hidden tag
+    database.upgrade_to_42()
+    session_data = as_admin.get('/sessions/' + session).json()
+    assert 'archived' not in session_data
+    assert 'hidden' in session_data['tags']
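
For reference, a minimal standalone sketch of the archived-to-hidden conversion performed by upgrade_to_42 above, assuming a direct pymongo connection; the URI and database name are placeholders rather than values taken from this patch, and it uses $addToSet instead of rewriting the whole tags list, so containers whose archived flag was falsy simply lose the field:

    # Sketch only -- mirrors the logic of upgrade_to_42_closure in bin/database.py
    from pymongo import MongoClient

    db = MongoClient('mongodb://localhost:27017')['scitran']  # placeholder URI and db name

    for cont_name in ['groups', 'projects', 'sessions', 'acquisitions']:
        for cont in db[cont_name].find({'archived': {'$exists': True}}):
            update = {'$unset': {'archived': True}}
            if cont.get('archived'):
                # only containers that were actually archived gain the new-style tag
                update['$addToSet'] = {'tags': 'hidden'}
            db[cont_name].update_one({'_id': cont['_id']}, update)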