diff --git a/api/placer.py b/api/placer.py
index e11d3b5bea5c0e407945ac7116c45c0d03ebd56e..2bba7ca5596ae425aca6f4b4202b868b3748b195 100644
--- a/api/placer.py
+++ b/api/placer.py
@@ -562,7 +562,8 @@ class PackfilePlacer(Placer):
         query = {
             'project': bson.ObjectId(self.p_id),
             'label': self.s_label,
-            'group': self.g_id
+            'group': self.g_id,
+            'deleted': {'$exists': False}
         }
 
         if self.s_code:
@@ -577,14 +578,18 @@ class PackfilePlacer(Placer):
 
         # Extra properties on insert
         insert_map = copy.deepcopy(query)
-        insert_map.pop('subject.code', None) # Remove query term that should not become part of the payload
+
+        # Remove query term that should not become part of the payload
+        insert_map.pop('subject.code', None)
+        insert_map.pop('deleted')
+
         insert_map['created'] = self.timestamp
         insert_map.update(self.metadata['session'])
         insert_map['subject'] = containerutil.add_id_to_subject(insert_map.get('subject'), bson.ObjectId(self.p_id))
         if 'timestamp' in insert_map:
             insert_map['timestamp'] = dateutil.parser.parse(insert_map['timestamp'])
 
-        session = config.db['session' + 's'].find_one_and_update(
+        session = config.db.sessions.find_one_and_update(
             query, {
                 '$set': updates,
                 '$setOnInsert': insert_map
@@ -597,6 +602,7 @@ class PackfilePlacer(Placer):
         query = {
             'session': session['_id'],
             'label': self.a_label,
+            'deleted': {'$exists': False}
         }
 
         if self.a_time:
@@ -612,12 +618,16 @@ class PackfilePlacer(Placer):
 
         # Extra properties on insert
         insert_map = copy.deepcopy(query)
+
+        # Remove query term that should not become part of the payload
+        insert_map.pop('deleted')
+
         insert_map['created'] = self.timestamp
         insert_map.update(self.metadata['acquisition'])
         if 'timestamp' in insert_map:
             insert_map['timestamp'] = dateutil.parser.parse(insert_map['timestamp'])
 
-        acquisition = config.db['acquisition' + 's'].find_one_and_update(
+        acquisition = config.db.acquisitions.find_one_and_update(
             query, {
                 '$set': updates,
                 '$setOnInsert': insert_map
diff --git a/tests/integration_tests/python/test_uploads.py b/tests/integration_tests/python/test_uploads.py
index d97fa8b8b9472ebd24c3ddd025b7a4fb3f57fdb6..607ec86e8d0df0a5a27a7fe42e7911a640aa97bb 100644
--- a/tests/integration_tests/python/test_uploads.py
+++ b/tests/integration_tests/python/test_uploads.py
@@ -1192,14 +1192,41 @@ def test_packfile_upload(data_builder, file_form, as_admin, as_root, api_db):
         # We didn't fine one
         assert False
 
+    # Remove sessions and acquisitions via delete and ensure new containers are created
+    session_ids_before = [str(x['_id']) for x in sessions]
+    acquisition_ids_before = [str(x['_id']) for x in acquisitions]
+    for s in session_ids_before:
+        assert as_admin.delete('/sessions/'+s).ok
+
+    # Add another packfile with the same metadata as above
+    r = as_admin.post('/projects/' + project + '/packfile-start')
+    assert r.ok
+    token = r.json()['token']
+    r = as_admin.post('/projects/' + project + '/packfile',
+        params={'token': token}, files=file_form('one.csv'))
+    assert r.ok
+
+    r = as_admin.post('/projects/' + project + '/packfile-end',
+        params={'token': token, 'metadata': metadata_json})
+    assert r.ok
+
+    # Ensure a new session and acquisition was created
+    sessions_after = list(api_db.sessions.find({'label':'test-packfile-timestamp', 'deleted': {'$exists': False}}))
+    acquisitions_after = list(api_db.acquisitions.find({'label':'test-packfile-timestamp', 'deleted': {'$exists': False}}))
+    assert len(sessions_after) == 1
+    assert len(acquisitions_after) == 1
+    assert str(sessions_after[0]['_id']) not in session_ids_before
+    assert str(acquisitions_after[0]['_id']) not in acquisition_ids_before
+
+
     # get another token (start packfile-upload)
     r = as_admin.post('/projects/' + project + '/packfile-start')
     assert r.ok
     token = r.json()['token']
 
-    files = [ 
-        ('file', file_form('two.csv')['file']) , 
-        ('file', file_form('three.csv')['file']) 
+    files = [
+        ('file', file_form('two.csv')['file']) ,
+        ('file', file_form('three.csv')['file'])
     ]
 
     # upload to packfile
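Note (not part of the patch): the placer change works because the upsert query now excludes soft-deleted containers while the 'deleted' key is stripped before '$setOnInsert', so find_one_and_update creates a fresh container instead of matching or resurrecting a deleted one. Below is a minimal pymongo sketch of that pattern, assuming a throwaway 'demo' database and made-up label/field values; it is illustrative only and not the Flywheel code above.

    import copy
    import datetime

    import pymongo

    db = pymongo.MongoClient()['demo']

    query = {
        'label': 'test-packfile-timestamp',
        'deleted': {'$exists': False},  # soft-deleted documents never match the upsert query
    }

    # Build the on-insert payload from the query, minus the operator expression,
    # so a newly created document does not end up with a literal 'deleted' field.
    insert_map = copy.deepcopy(query)
    insert_map.pop('deleted')
    insert_map['created'] = datetime.datetime.utcnow()

    session = db.sessions.find_one_and_update(
        query,
        {'$set': {'modified': datetime.datetime.utcnow()}, '$setOnInsert': insert_map},
        upsert=True,
        return_document=pymongo.ReturnDocument.AFTER,
    )

    # Soft-delete the document, then run the same upsert again: the query no longer
    # matches, so a brand-new session is created rather than the deleted one reused.
    db.sessions.update_one({'_id': session['_id']}, {'$set': {'deleted': datetime.datetime.utcnow()}})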