Commit 7871a5c2, authored 8 years ago by Megan Henning

    Add simple endpoints for demonstration

Parent: 0c6b44ec

Showing 3 changed files with 324 additions and 59 deletions:

    api/api.py                            +27   −22
    api/handlers/dataexplorerhandler.py   +139   −0
    bin/dicom_doctype.py                  +158  −37
api/api.py  +27 −22

 import webapp2
 import webapp2_extras.routes

 from .centralclient import CentralClient
 from .download import Download
 from .handlers.collectionshandler import CollectionsHandler
 from .handlers.confighandler import Config, Version
 from .handlers.containerhandler import ContainerHandler
+from .handlers.dataexplorerhandler import DataExplorerHandler
 from .handlers.devicehandler import DeviceHandler
 from .handlers.grouphandler import GroupHandler
 from .handlers.listhandler import AnalysesHandler, ListHandler, FileListHandler, NotesListHandler, PermissionsListHandler, TagsListHandler
 from .handlers.reporthandler import ReportHandler
 from .handlers.resolvehandler import ResolveHandler
 from .handlers.roothandler import RootHandler
 from .handlers.schemahandler import SchemaHandler
 from .handlers.searchhandler import SearchHandler
 from .handlers.userhandler import UserHandler
 from .jobs.handlers import BatchHandler, JobsHandler, JobHandler, GearsHandler, GearHandler, RulesHandler, RuleHandler
 from .upload import Upload
 from .web.base import RequestHandler

 from . import config

 log = config.log

 routing_regexes = {
 ...

@@ -51,6 +54,7 @@ routing_regexes = {
     'schema': r'[^/.]{3,60}/[^/.]{3,60}\.json'
 }

 def route(path, target, h=None, m=None, name=None):
     # https://webapp2.readthedocs.io/en/latest/api/webapp2.html#webapp2.Route
 ...

@@ -101,10 +105,12 @@ endpoints = [
     # Search
     route('/search',                     SearchHandler,       h='advanced_search',     m=['POST']),
     route('/search/field',               SearchHandler,       h='get_terms_for_field', m=['POST']),
     route('/search/files',               SearchHandler,       h='get_datatree',        m=['GET']),
     route('/search/<cont_name:{cname}>', SearchHandler,                                m=['GET']),
+    route('/dataexplorer/search',        DataExplorerHandler, h='search',              m=['POST']),
+    route('/dataexplorer/facets',        DataExplorerHandler, h='get_facets',          m=['GET']),

     # Users
 ...

@@ -147,7 +153,6 @@ endpoints = [
         route('/<:[^/]+>/suggest/<:[^/]+>/<:[^/]+>', GearHandler, h='suggest'),
     ]),

     # Batch jobs
     route('/batch', BatchHandler, h='get_all', m=['GET']),
 ...
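The two added routes wire DataExplorerHandler into the public API. As a rough sketch of how a client could exercise them (the base URL and the lack of auth handling here are assumptions, not part of the commit; the payload shape follows the json_body.get('query') contract of the new handler below):

import requests  # any HTTP client works; requests used for brevity

API = 'https://example.host/api'  # hypothetical deployment URL

# POST /dataexplorer/search -> DataExplorerHandler.search
# 'flywheel' filters fields of the parent acquisition document,
# 'file' filters DICOM-header fields of the file document itself.
resp = requests.post(API + '/dataexplorer/search', json={
    'query': {
        'flywheel': {'project.label': 'neuro'},
        'file': {'dicom_header.SeriesDescription': 'fmri'}
    }
})
print(resp.json()['result_count'])

# GET /dataexplorer/facets -> DataExplorerHandler.get_facets
print(requests.get(API + '/dataexplorer/facets').json()['facets'])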
api/handlers/dataexplorerhandler.py  new file (100644)  +139 −0

import bson
import copy
import dateutil
import elasticsearch

from .. import base
from .. import config
from ..auth import require_login, require_superuser

log = config.log

# Example of the query shape this handler builds: files whose
# SeriesDescription is 'fmri' inside acquisitions of the 'neuro' project.
TEST_QUERY = {
    "query": {
        "filtered": {
            "query": {"match_all": {}},
            "filter": {
                "and": [
                    {"term": {"dicom_header.SeriesDescription": "fmri"}},
                    {"has_parent": {
                        "type": "acquisition",
                        "query": {"term": {"project.label": "neuro"}}
                    }}
                ]
            }
        }
    }
}

MATCH_ALL = {"match_all": {}}

# Skeleton that _construct_query fills in per request.
BASE_QUERY = {
    "query": {
        "filtered": {
            "query": MATCH_ALL,
            "filter": {
                "and": [
                    {"has_parent": {"type": "acquisition"}}
                ]
            }
        }
    }
}

FACET_QUERY = {
    "size": 0,
    "aggs": {
        "Series Description":          {"terms": {"field": "dicom_header.SeriesDescription_term", "size": 5}},
        "Series Description Fragment": {"terms": {"field": "dicom_header.SeriesDescription",      "size": 5}},
        "Patient Name":                {"terms": {"field": "dicom_header.PatientName_term",       "size": 5}},
        "Patient ID":                  {"terms": {"field": "dicom_header.PatientID_term",         "size": 5}},
        "Modality":                    {"terms": {"field": "dicom_header.Modality_term",          "size": 5}},
        "Study Date": {"date_histogram": {"field": "dicom_header.StudyDate", "interval": "day"}}
    }
}


class DataExplorerHandler(base.RequestHandler):

    def __init__(self, request=None, response=None):
        super(DataExplorerHandler, self).__init__(request, response)

    @require_login
    def search(self):
        user_query = self.request.json_body.get('query', {})
        return self._run_query(self._construct_query(user_query))

    def _construct_query(self, user_query):
        es_query = copy.deepcopy(BASE_QUERY)
        and_block = es_query['query']['filtered']['filter']['and']
        parent_block = and_block[0]['has_parent']

        # Hierarchy (group/project/session/acquisition) terms are matched
        # against the parent acquisition document.
        user_flywheel_query = user_query.get('flywheel')
        if user_flywheel_query:
            parent_block['query'] = {'term': user_flywheel_query}
        else:
            parent_block['filter'] = MATCH_ALL

        # File-level (DICOM header) terms become additional filters.
        user_file_query = user_query.get('file')
        if user_file_query:
            log.debug('adding file term filters')
            for k, v in user_file_query.iteritems():
                and_block.append({'term': {k: v}})

        log.debug(es_query)
        return es_query

    def _run_query(self, es_query):
        results = config.es.search(
            index='dicom_store',
            doc_type='dicom',
            body=es_query,
            size=10000
        )
        return {
            'results': results['hits']['hits'],
            'result_count': results['hits']['total']
        }

    def get_facets(self):
        results = config.es.search(
            index='dicom_store',
            doc_type='dicom',
            body=FACET_QUERY,
            size=10000
        )['aggregations']
        return {'facets': results}
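To trace _construct_query concretely: for a request body of {'query': {'flywheel': {'project.label': 'neuro'}, 'file': {'dicom_header.SeriesDescription': 'fmri'}}}, the method returns the following ES 1.x "filtered" body (derived by stepping through the code above; it is the same shape as TEST_QUERY, with the clauses in the order the code appends them):

{
    "query": {
        "filtered": {
            "query": {"match_all": {}},
            "filter": {
                "and": [
                    {"has_parent": {
                        "type": "acquisition",
                        "query": {"term": {"project.label": "neuro"}}
                    }},
                    {"term": {"dicom_header.SeriesDescription": "fmri"}}
                ]
            }
        }
    }
}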
bin/dicom_doctype.py  +158 −37

@@ -14,6 +14,109 @@ db = config.db

 DICOM_INDEX = 'dicom_store'

+# Index-time vs. search-time analysis: both sides keyword-tokenize and
+# lowercase, but the index analyzer also emits 2-50 character nGrams so
+# substring searches can match.
+ANALYSIS = {
+    'analyzer': {
+        'str_search_analyzer': {
+            'tokenizer': 'keyword',
+            'filter': ['lowercase']
+        },
+        'str_index_analyzer': {
+            'tokenizer': 'keyword',
+            'filter': ['lowercase', 'substring']
+        }
+    },
+    'filter': {
+        'substring': {
+            'type': 'nGram',
+            'min_gram': 2,
+            'max_gram': 50,
+            'token_chars': []
+        }
+    }
+}
+
+DYNAMIC_TEMPLATES = [
+    {'_id': {
+        'match': '_id',
+        'match_mapping_type': 'string',
+        'mapping': {'type': 'string', 'index': 'not_analyzed'}
+    }},
+    {'long_fields':    {'match_mapping_type': 'long',    'mapping': {'ignore_malformed': True}}},
+    {'integer_fields': {'match_mapping_type': 'integer', 'mapping': {'ignore_malformed': True}}},
+    {'double_fields':  {'match_mapping_type': 'double',  'mapping': {'ignore_malformed': True}}},
+    {'float_fields':   {'match_mapping_type': 'float',   'mapping': {'ignore_malformed': True}}},
+    {'short_fields':   {'match_mapping_type': 'short',   'mapping': {'ignore_malformed': True}}},
+    {'byte_fields':    {'match_mapping_type': 'byte',    'mapping': {'ignore_malformed': True}}},
+    {'hash': {
+        'match': 'hash',
+        'match_mapping_type': 'string',
+        'mapping': {'type': 'string', 'index': 'not_analyzed'}
+    }},
+    {'string_fields': {
+        'match': '*',
+        'match_mapping_type': 'string',
+        'mapping': {
+            'type': 'string',
+            'search_analyzer': 'str_search_analyzer',
+            'index_analyzer': 'str_index_analyzer',
+            'ignore_above': 10922
+        }
+    }}
+]

 def datetime(str_datetime):
     pass
 ...
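The split analyzers are what make partial matches work: a value like 'fMRI' is stored under every 2-to-50-character lowercased slice, so a lowercased search term can hit the middle of a series description. A plain-Python illustration of what the 'substring' nGram filter above emits (this mirrors the filter definition; it is not an Elasticsearch call):

# What the nGram filter (min_gram=2, max_gram=50) emits for one
# lowercased keyword token:
def ngrams(token, min_gram=2, max_gram=50):
    return [token[i:j] for i in range(len(token))
            for j in range(i + min_gram, min(i + max_gram, len(token)) + 1)]

print(ngrams('fmri'))
# ['fm', 'fmr', 'fmri', 'mr', 'mri', 'ri']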
@@ -155,10 +258,19 @@ if __name__ == '__main__':
     request = {
         'settings': {
             'number_of_shards': 1,
-            'number_of_replicas': 0
+            'number_of_replicas': 0,
+            'analysis': ANALYSIS
         },
         'mappings': {
             '_default_': {
                 '_all': {'enabled': True},
                 'dynamic_templates': DYNAMIC_TEMPLATES
             },
             'acquisition': {},
             'dicom': {
                 '_parent': {'type': 'acquisition'},
                 'properties': {
                     'dicom_header': {
                         'properties': mappings
 ...
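With ANALYSIS and DYNAMIC_TEMPLATES folded into the request body above, standing up the index is a single elasticsearch-py call. A sketch assuming a local dev cluster (the host and the delete-then-recreate step are assumptions, not part of this commit):

from elasticsearch import Elasticsearch

es = Elasticsearch(['localhost:9200'])      # assumed local ES node
if es.indices.exists(index=DICOM_INDEX):    # DICOM_INDEX = 'dicom_store'
    es.indices.delete(index=DICOM_INDEX)    # rebuild from scratch (assumption)
es.indices.create(index=DICOM_INDEX, body=request)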
@@ -181,46 +293,55 @@ if __name__ == '__main__':
     projects = db.projects.find({'group': g['_id']})
     for p in projects:
         p.pop('permissions', None)
         logging.warn('the project is {}'.format(p['label']))
         sessions = db.sessions.find({'project': p['_id']})
         for s in sessions:
             s.pop('permissions', None)
             acquisitions = db.acquisitions.find({'session': s['_id'], 'files.type': 'dicom'})
             for a in acquisitions:
-                dicom_data = a.get('metadata')
-                if dicom_data:
-                    term_fields = {}
-                    for s in SKIPPED:
-                        dicom_data.pop(s, None)
-                    for k, v in dicom_data.iteritems():
-                        if 'datetime' in k.lower():
-                            config.log.debug('called for {}'.format(k))
-                            v = cast_datetime(str(v))
-                        elif 'date' in k.lower():
-                            config.log.debug('called for {}'.format(k))
-                            v = cast_date(str(v))
-                        elif 'time' in k.lower():
-                            config.log.debug('called for {}'.format(k))
-                            v = cast_time(str(v))
-                        term_field_name = k + '_term'
-                        if term_field_name in dicom_mappings:
-                            term_fields[term_field_name] = str(v)
-                    dicom_data.update(term_fields)
-                    permissions = a['permissions']
-                    doc = {
-                        'dicom_header': dicom_data,
-                        'base_container_type': 'acquisition',
-                        'acquisition': a,
-                        'session': s,
-                        'project': p,
-                        'group': g,
-                        'permissions': a['permissions']
-                    }
-                    doc = json.dumps(doc, default=encoder.custom_json_serializer)
-                    es.index(index=DICOM_INDEX, doc_type='dicom', body=doc)
+                # Index the acquisition itself as the parent document...
+                permissions = a.pop('permissions', [])
+                files = a.pop('files', [])
+                doc = {
+                    'acquisition': a,
+                    'session': s,
+                    'project': p,
+                    'group': g,
+                    'permissions': permissions
+                }
+                doc = json.dumps(doc, default=encoder.custom_json_serializer)
+                es.index(index=DICOM_INDEX, id=a['_id'], doc_type='acquisition', body=doc)
+                # ...then index each DICOM file as a child of that acquisition.
+                for f in files:
+                    if f.get('type', '') == 'dicom' and f.get('info'):
+                        dicom_data = f.pop('info')
+                        term_fields = {}
+                        for skipped in SKIPPED:
+                            dicom_data.pop(skipped, None)
+                        for k, v in dicom_data.iteritems():
+                            # Normalize DICOM date/time values before indexing.
+                            if 'datetime' in k.lower():
+                                config.log.debug('called for {}'.format(k))
+                                v = cast_datetime(str(v))
+                            elif 'date' in k.lower():
+                                config.log.debug('called for {}'.format(k))
+                                v = cast_date(str(v))
+                            elif 'time' in k.lower():
+                                config.log.debug('called for {}'.format(k))
+                                v = cast_time(str(v))
+                            term_field_name = k + '_term'
+                            if term_field_name in dicom_mappings:
+                                term_fields[term_field_name] = str(v)
+                        dicom_data.update(term_fields)
+                        doc = {
+                            'file': f,
+                            'dicom_header': dicom_data
+                        }
+                        doc = json.dumps(doc, default=encoder.custom_json_serializer)
+                        es.index(index=DICOM_INDEX, id=f['name'], parent=a['_id'], doc_type='dicom', body=doc)
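Because each file document is indexed with parent=a['_id'] under the 'dicom' type, whose _parent mapping points at 'acquisition', the has_parent filter used by DataExplorerHandler can resolve project-level terms against the parent documents written here. A quick way to sanity-check the backfill (a sketch reusing the script's es client; the body mirrors BASE_QUERY):

hits = es.search(index=DICOM_INDEX, doc_type='dicom', size=10, body={
    'query': {'filtered': {
        'query': {'match_all': {}},
        'filter': {'has_parent': {
            'type': 'acquisition',
            'query': {'term': {'project.label': 'neuro'}}
        }}
    }}
})['hits']['hits']
print([h['_id'] for h in hits])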