Commit 7871a5c2 authored by Megan Henning

Add simple endpoints for demonstration

parent 0c6b44ec
import webapp2
import webapp2_extras.routes
from .centralclient import CentralClient
from .download import Download
from .handlers.collectionshandler import CollectionsHandler
from .handlers.confighandler import Config, Version
from .handlers.containerhandler import ContainerHandler
from .handlers.dataexplorerhandler import DataExplorerHandler
from .handlers.devicehandler import DeviceHandler
from .handlers.grouphandler import GroupHandler
from .handlers.listhandler import AnalysesHandler, ListHandler, FileListHandler, NotesListHandler, PermissionsListHandler, TagsListHandler
from .handlers.reporthandler import ReportHandler
from .handlers.resolvehandler import ResolveHandler
from .handlers.roothandler import RootHandler
from .handlers.schemahandler import SchemaHandler
from .handlers.searchhandler import SearchHandler
from .handlers.userhandler import UserHandler
from .jobs.handlers import BatchHandler, JobsHandler, JobHandler, GearsHandler, GearHandler, RulesHandler, RuleHandler
from .upload import Upload
from .web.base import RequestHandler
from . import config
log = config.log
routing_regexes = {
@@ -51,6 +54,7 @@ routing_regexes = {
    'schema': r'[^/.]{3,60}/[^/.]{3,60}\.json'
}
def route(path, target, h=None, m=None, name=None):
    # https://webapp2.readthedocs.io/en/latest/api/webapp2.html#webapp2.Route
@@ -101,10 +105,12 @@ endpoints = [
    # Search
    route('/search', SearchHandler, h='advanced_search', m=['POST']),
    route('/search/field', SearchHandler, h='get_terms_for_field', m=['POST']),
    route('/search/files', SearchHandler, h='get_datatree', m=['GET']),
    route('/search/<cont_name:{cname}>', SearchHandler, m=['GET']),
    route('/dataexplorer/search', DataExplorerHandler, h='search', m=['POST']),
    route('/dataexplorer/facets', DataExplorerHandler, h='get_facets', m=['GET']),
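    # Illustrative body for POST /dataexplorer/search (example values only, borrowed
    # from TEST_QUERY in the handler module):
    #   {"query": {"flywheel": {"project.label": "neuro"},
    #              "file": {"dicom_header.SeriesDescription": "fmri"}}}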
    # Users
@@ -147,7 +153,6 @@ endpoints = [
        route('/<:[^/]+>/suggest/<:[^/]+>/<:[^/]+>', GearHandler, h='suggest'),
    ]),
    # Batch jobs
    route('/batch', BatchHandler, h='get_all', m=['GET']),
...
import bson
import copy
import dateutil
import elasticsearch
from .. import base
from .. import config
from ..auth import require_login, require_superuser
log = config.log
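# Reference query in ES 1.x "filtered" syntax: dicom documents whose SeriesDescription
# is "fmri" and whose parent acquisition belongs to the "neuro" project.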
TEST_QUERY = {
    "query": {
        "filtered": {
            "query": {"match_all": {}},
            "filter": {
                "and": [
                    {"term": {"dicom_header.SeriesDescription": "fmri"}},
                    {
                        "has_parent": {
                            "type": "acquisition",
                            "query": {
                                "term": {"project.label": "neuro"}
                            }
                        }
                    }
                ]
            }
        }
    }
}
MATCH_ALL = {"match_all": {}}
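# Query template filled in by _construct_query(): the has_parent clause takes the
# user's "flywheel" terms, and any "file" terms are appended to the "and" filter.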
BASE_QUERY = {
    "query": {
        "filtered": {
            "query": MATCH_ALL,
            "filter": {
                "and": [
                    {
                        "has_parent": {
                            "type": "acquisition"
                        }
                    }
                ]
            }
        }
    }
}
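# Aggregation-only query (size 0): top-5 term buckets for common DICOM header fields
# plus a per-day histogram of StudyDate.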
FACET_QUERY = {
    "size": 0,
    "aggs": {
        "Series Description": {
            "terms": {
                "field": "dicom_header.SeriesDescription_term",
                "size": 5
            }
        },
        "Series Description Fragment": {
            "terms": {
                "field": "dicom_header.SeriesDescription",
                "size": 5
            }
        },
        "Patient Name": {
            "terms": {
                "field": "dicom_header.PatientName_term",
                "size": 5
            }
        },
        "Patient ID": {
            "terms": {
                "field": "dicom_header.PatientID_term",
                "size": 5
            }
        },
        "Modality": {
            "terms": {
                "field": "dicom_header.Modality_term",
                "size": 5
            }
        },
        "Study Date": {
            "date_histogram": {
                "field": "dicom_header.StudyDate",
                "interval": "day"
            }
        }
    }
}
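# Backs the /dataexplorer routes: search() builds a parent/child filtered query from
# the request body, get_facets() runs the static FACET_QUERY and returns aggregations.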
class DataExplorerHandler(base.RequestHandler):

    def __init__(self, request=None, response=None):
        super(DataExplorerHandler, self).__init__(request, response)

    @require_login
    def search(self):
        user_query = self.request.json_body.get('query')
        return self._run_query(self._construct_query(user_query))

    def _construct_query(self, user_query):
        es_query = copy.deepcopy(BASE_QUERY)
        and_block = es_query['query']['filtered']['filter']['and']
        parent_block = and_block[0]['has_parent']
        user_flywheel_query = user_query.get('flywheel')
        if user_flywheel_query:
            parent_block['query'] = {'term': user_flywheel_query}
        else:
            parent_block['filter'] = MATCH_ALL
        user_file_query = user_query.get('file')
        if user_file_query:
            log.debug('adding stuff')
            for k, v in user_file_query.iteritems():
                and_block.append({'term': {k: v}})
        log.debug(es_query)
        return es_query

    def _run_query(self, es_query):
        results = config.es.search(
            index='dicom_store',
            doc_type='dicom',
            body=es_query,
            size=10000
        )
        return {'results': results['hits']['hits'], 'result_count': results['hits']['total']}

    def get_facets(self):
        results = config.es.search(
            index='dicom_store',
            doc_type='dicom',
            body=FACET_QUERY,
            size=10000
        )['aggregations']
        return {'facets': results}
@@ -14,6 +14,109 @@ db = config.db
DICOM_INDEX = 'dicom_store'
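# Index analysis settings: both analyzers keyword-tokenize and lowercase the whole
# value; the index-time analyzer also emits 2-50 character nGrams ('substring')
# so substring matches hit string fields.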
ANALYSIS = {
    'analyzer': {
        'str_search_analyzer': {
            'tokenizer': 'keyword',
            'filter': ['lowercase']
        },
        'str_index_analyzer': {
            'tokenizer': 'keyword',
            'filter': ['lowercase', 'substring']
        }
    },
    'filter': {
        'substring': {
            'type': 'nGram',
            'min_gram': 2,
            'max_gram': 50,
            'token_chars': []
        }
    }
}
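# Dynamic mapping templates: _id and hash are stored not_analyzed, numeric types
# ignore malformed values, and all remaining string fields use the substring
# analyzers defined above.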
DYNAMIC_TEMPLATES = [
    {
        '_id': {
            'match': '_id',
            'match_mapping_type': 'string',
            'mapping': {
                'type': 'string',
                'index': 'not_analyzed'
            }
        }
    },
    {
        'long_fields': {
            'match_mapping_type': 'long',
            'mapping': {
                'ignore_malformed': True
            }
        }
    },
    {
        'integer_fields': {
            'match_mapping_type': 'integer',
            'mapping': {
                'ignore_malformed': True
            }
        }
    },
    {
        'double_fields': {
            'match_mapping_type': 'double',
            'mapping': {
                'ignore_malformed': True
            }
        }
    },
    {
        'float_fields': {
            'match_mapping_type': 'float',
            'mapping': {
                'ignore_malformed': True
            }
        }
    },
    {
        'short_fields': {
            'match_mapping_type': 'short',
            'mapping': {
                'ignore_malformed': True
            }
        }
    },
    {
        'byte_fields': {
            'match_mapping_type': 'byte',
            'mapping': {
                'ignore_malformed': True
            }
        }
    },
    {
        'hash': {
            'match': 'hash',
            'match_mapping_type': 'string',
            'mapping': {
                'type': 'string',
                'index': 'not_analyzed'
            }
        }
    },
    {
        'string_fields': {
            'match': '*',
            'match_mapping_type': 'string',
            'mapping': {
                'type': 'string',
                'search_analyzer': 'str_search_analyzer',
                'index_analyzer': 'str_index_analyzer',
                'ignore_above': 10922
            }
        }
    }
]
def datetime(str_datetime):
    pass
@@ -155,10 +258,19 @@ if __name__ == '__main__':
    request = {
        'settings': {
            'number_of_shards': 1,
            'number_of_replicas': 0,
            'analysis': ANALYSIS
        },
        'mappings': {
            '_default_': {
                '_all': {'enabled': True},
                'dynamic_templates': DYNAMIC_TEMPLATES
            },
            'acquisition': {},
            'dicom': {
                '_parent': {
                    'type': 'acquisition'
                },
                'properties': {
                    'dicom_header': {
                        'properties': mappings
@@ -181,46 +293,55 @@ if __name__ == '__main__':
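        # Walk group -> project -> session -> acquisition; index each acquisition as a
        # parent document, then each dicom file's header as a child 'dicom' document
        # (parent=acquisition _id), matching the has_parent queries in the handler.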
        projects = db.projects.find({'group': g['_id']})
        for p in projects:
            p.pop('permissions', None)
            logging.warn('the project is {}'.format(p['label']))
            sessions = db.sessions.find({'project': p['_id']})
            for s in sessions:
                s.pop('permissions', None)
                acquisitions = db.acquisitions.find({'session': s['_id'], 'files.type': 'dicom'})
                for a in acquisitions:
                    permissions = a.pop('permissions', [])
                    files = a.pop('files', [])
                    doc = {
                        'acquisition': a,
                        'session': s,
                        'project': p,
                        'group': g,
                        'permissions': permissions
                    }
                    doc = json.dumps(doc, default=encoder.custom_json_serializer)
                    es.index(index=DICOM_INDEX, id=a['_id'], doc_type='acquisition', body=doc)
                    for f in files:
                        if f.get('type', '') == 'dicom' and f.get('info'):
                            dicom_data = f.pop('info')
                            term_fields = {}
                            for skipped in SKIPPED:
                                dicom_data.pop(skipped, None)
                            for k, v in dicom_data.iteritems():
                                if 'datetime' in k.lower():
                                    config.log.debug('called for {}'.format(k))
                                    v = cast_datetime(str(v))
                                elif 'date' in k.lower():
                                    config.log.debug('called for {}'.format(k))
                                    v = cast_date(str(v))
                                elif 'time' in k.lower():
                                    config.log.debug('called for {}'.format(k))
                                    v = cast_time(str(v))
                                term_field_name = k + '_term'
                                if term_field_name in dicom_mappings:
                                    term_fields[k + '_term'] = str(v)
                            dicom_data.update(term_fields)
                            doc = {
                                'file': f,
                                'dicom_header': dicom_data
                            }
                            doc = json.dumps(doc, default=encoder.custom_json_serializer)
                            es.index(index=DICOM_INDEX, id=f['name'], parent=a['_id'], doc_type='dicom', body=doc)