Commit 3ffb4dab authored 7 years ago by Megan Henning
Reorganize code pieces in de
parent ed8e33f1
Showing 2 changed files with 125 additions and 133 deletions:

  api/api.py (+1, −1)
  api/handlers/dataexplorerhandler.py (+124, −132)
api/api.py  +1 −1

@@ -103,7 +103,7 @@ endpoints = [
     route('/dataexplorer/search',                  DataExplorerHandler, h='search',                 m=['POST']),
     route('/dataexplorer/facets',                  DataExplorerHandler, h='get_facets',             m=['POST']),
     route('/dataexplorer/search/fields',           DataExplorerHandler, h='search_fields',          m=['POST']),
-    route('/dataexplorer/search/fields/aggregate', DataExplorerHandler, h='custom_field_values',    m=['POST']),
+    route('/dataexplorer/search/fields/aggregate', DataExplorerHandler, h='aggregate_field_values', m=['POST']),
     route('/dataexplorer/index/fields',            DataExplorerHandler, h='index_field_names',      m=['POST']),

     # Users
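The only change here is the handler name bound to the existing aggregate route. As a rough illustration (a sketch, not documented API behavior: the base URL, auth header, and example field name are placeholders; the payload keys mirror what aggregate_field_values() reads in the handler diff below), a client would now reach it like this:

# Hypothetical client call to the renamed endpoint; URL prefix, auth header,
# and the field name are assumptions for illustration only.
import requests

resp = requests.post(
    'https://flywheel.example/api/dataexplorer/search/fields/aggregate',
    json={'field_name': 'subject.code', 'search_string': 'ses'},
    headers={'Authorization': '<session-or-api-key>'},
)
print(resp.status_code, resp.json())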
api/handlers/dataexplorerhandler.py  +124 −132
@@ -9,6 +9,78 @@ from ..auth import require_login, require_superuser

 log = config.log

+"""
+EXAMPLE_SESSION_QUERY = {
+    "size": 0,
+    "query": {
+        "match": {
+            "_all": "test'"
+        }
+    },
+    "aggs": {
+        "by_session": {
+            "terms": {
+                "field": "session._id",
+                "size": 100
+            },
+            "aggs": {
+                "by_top_hit": {
+                    "top_hits": {
+                        "size": 1
+                    }
+                }
+            }
+        }
+    }
+}
+
+EXAMPLE_ACQUISITION_QUERY = {
+    "size": 0,
+    "query": {
+        "match": {
+            "_all": "megan'"
+        }
+    },
+    "aggs": {
+        "by_session": {
+            "terms": {
+                "field": "acquisition._id",
+                "size": 100
+            },
+            "aggs": {
+                "by_top_hit": {
+                    "top_hits": {
+                        "size": 1
+                    }
+                }
+            }
+        }
+    }
+}
+
+EXAMPLE_FILE_QUERY = {
+    "size": 100,
+    "query": {
+        "bool": {
+            "must": {
+                "match": {
+                    "_all": "brain"
+                }
+            },
+            "filter": {
+                "bool": {
+                    "must": [
+                        {"term": {"file.type": "dicom"}},
+                        {"term": {"container_type": "file"}}
+                    ]
+                }
+            }
+        }
+    }
+}
+"""

 ANALYSIS = {
     "analyzer": {
         "my_analyzer": {
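The three EXAMPLE_*_QUERY bodies now live in a module docstring purely as reference material. If one wanted to try the session example by hand, a sketch like the following would work from inside this module, assuming the same config.es client and data_explorer index the handlers use (the dict would first have to be copied out of the docstring, since after this commit it is no longer a real module constant):

# Sketch only: run the session example against the data_explorer index and
# walk the terms/top_hits aggregation in the response.
result = config.es.search(index='data_explorer', doc_type='flywheel', body=EXAMPLE_SESSION_QUERY)
for bucket in result['aggregations']['by_session']['buckets']:
    top_hit = bucket['by_top_hit']['hits']['hits'][0]
    print('%s: %d docs, top hit %s' % (bucket['key'], bucket['doc_count'], top_hit['_id']))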
@@ -150,75 +222,6 @@ FACET_QUERY = {
     }
 }

-EXAMPLE_SESSION_QUERY = {
-    "size": 0,
-    "query": {
-        "match": {
-            "_all": "test'"
-        }
-    },
-    "aggs": {
-        "by_session": {
-            "terms": {
-                "field": "session._id",
-                "size": 100
-            },
-            "aggs": {
-                "by_top_hit": {
-                    "top_hits": {
-                        "size": 1
-                    }
-                }
-            }
-        }
-    }
-}
-
-EXAMPLE_ACQUISITION_QUERY = {
-    "size": 0,
-    "query": {
-        "match": {
-            "_all": "megan'"
-        }
-    },
-    "aggs": {
-        "by_session": {
-            "terms": {
-                "field": "acquisition._id",
-                "size": 100
-            },
-            "aggs": {
-                "by_top_hit": {
-                    "top_hits": {
-                        "size": 1
-                    }
-                }
-            }
-        }
-    }
-}
-
-EXAMPLE_FILE_QUERY = {
-    "size": 100,
-    "query": {
-        "bool": {
-            "must": {
-                "match": {
-                    "_all": "brain"
-                }
-            },
-            "filter": {
-                "bool": {
-                    "must": [
-                        {"term": {"file.type": "dicom"}},
-                        {"term": {"container_type": "file"}}
-                    ]
-                }
-            }
-        }
-    }
-}
-
 class DataExplorerHandler(base.RequestHandler):
     # pylint: disable=broad-except
@@ -230,7 +233,7 @@ class DataExplorerHandler(base.RequestHandler):
         try:
             request = self.request.json_body
-        except KeyError:
+        except (ValueError):
             if request_type == 'search':
                 self.abort(400, 'Must specify return type')
             return None, None, None
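For context on the swapped exception (an aside, not part of the commit): assuming a webob-style request object whose json_body property calls the standard json module, a malformed request body surfaces as a ValueError rather than a KeyError, for example:

import json

try:
    json.loads('{not valid json')
except ValueError as err:  # Python 2 has no json.JSONDecodeError; bad JSON raises ValueError
    print('malformed request body: %s' % err)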
@@ -265,85 +268,74 @@
         return return_type, modified_filters, search_string

     @require_login
-    def custom_field_values(self):
+    def aggregate_field_values(self):
         """
-        Return list of type ahead values for a key given a value
-        that the user has already started to type in for the value of
+        Return list of type ahead values for a key given a value
+        that the user has already started to type in for the value of
+        a custom string field or a set of statistics if the field type is
+        a number.
         """
-        custom_field = self.request.json_body['field_name']
+        try:
+            field_name = self.request.json_body['field_name']
+        except (KeyError, ValueError):
+            self.abort(400, 'Field name is required')
         filters = [{'term': {'permissions._id': self.uid}}]
-        field = config.es.indices.get_field_mapping(custom_field, index='data_explorer', doc_type='flywheel')['data_explorer']['mappings']['flywheel'][custom_field]
-        field_type = self._get_field_type(field['mapping'][custom_field.split('.')[-1]]['type'])
+        try:
+            field = config.es.get(index='data_explorer_fields', id=field_name, doc_type='flywheel_field')
+        except TransportError as e:
+            log.warning(e)
+            self.abort(404, 'Could not find mapping for field {}.'.format(field_name))
+        field_type = field['_source']['type']
+        search_string = self.request.json_body.get('search_string', None)

-        # If the field type is a string, return a list of type-ahead values
-        if field_type == 'string':
-            user_value = self.request.json_body['value']
-            body = {
-                "size": 0,
-                "query": {
-                    "bool": {
-                        "must": {
-                            "match": {custom_field: user_value}
-                        },
-                        "filter": filters
-                    }
-                },
-                "aggs": {
-                    "results": {
-                        "terms": {
-                            "field": custom_field + ".raw",
-                            "size": 15
-                        }
-                    }
-                }
-            }
+        body = {
+            "size": 0,
+            "query": {
+                "bool": {
+                    "must": {
+                        "match": {field_name: search_string}
+                    },
+                    "filter": filters
+                }
+            }
+        }

-            if not filters:
-                body['query']['bool'].pop('filter')
+        if search_string is None:
+            body['query']['bool']['must'] = MATCH_ALL
+        if not filters:
+            body['query']['bool'].pop('filter')

-            aggs = config.es.search(index='data_explorer', doc_type='flywheel', body=body)['aggregations']['results']['buckets']
-            aggs = [bucket['key'] for bucket in aggs]
-            return {'type_aheads': aggs}
+        if field_type in ['string', 'boolean']:
+            body['aggs'] = {
+                "results": {
+                    "terms": {
+                        "field": field_name + ".raw",
+                        "size": 15
+                    }
+                }
+            }

-        # If it is a number (int, date, or some other type), return various statistics on the values of the field
-        else:
-            body = {
-                "size": 0,
-                "query": {
-                    "bool": {
-                        "must": {"match_all": {}},
-                        "filter": filters
-                    }
-                },
-                "aggs": {
-                    "results": {
-                        "stats": {"field": custom_field}
-                    }
-                }
-            }
+        elif field_type in ['integer', 'float', 'date']:
+            body['aggs'] = {
+                "results": {
+                    "stats": {
+                        "field": field_name
+                    }
+                }
+            }

-            if not filters:
-                body['query']['bool'].pop('filter')
+        else:
+            self.abort(400, 'Aggregations are only allowed on string, integer, float, data and boolean fields.')

-            aggs = config.es.search(index='data_explorer', doc_type='flywheel', body=body)['aggregations']['results']
-            return aggs
+        aggs = config.es.search(index='data_explorer', doc_type='flywheel', body=body)['aggregations']['results']
+        return aggs

     @require_login
     def get_facets(self):
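For readers skimming the change: the rewritten handler looks up the field's type in the data_explorer_fields index and then builds one of two aggregation bodies. The standalone sketch below (hypothetical helper name, not part of the commit; MATCH_ALL is assumed to be {"match_all": {}} as its use above suggests, and errors are raised instead of calling self.abort) mirrors that branching so it can be inspected or tried outside the handler:

# Hypothetical sketch of the aggregation-body branching in
# aggregate_field_values(); names and MATCH_ALL value are assumptions.
MATCH_ALL = {"match_all": {}}

def build_aggregation_body(field_name, field_type, search_string=None, filters=None):
    body = {
        "size": 0,
        "query": {
            "bool": {
                "must": {"match": {field_name: search_string}},
                "filter": filters,
            }
        },
    }
    if search_string is None:
        body["query"]["bool"]["must"] = MATCH_ALL
    if not filters:
        body["query"]["bool"].pop("filter")

    if field_type in ["string", "boolean"]:
        # type-ahead style: top 15 raw terms for the field
        body["aggs"] = {"results": {"terms": {"field": field_name + ".raw", "size": 15}}}
    elif field_type in ["integer", "float", "date"]:
        # numeric/date fields: count/min/max/avg/sum statistics
        body["aggs"] = {"results": {"stats": {"field": field_name}}}
    else:
        raise ValueError("unsupported field type: " + field_type)
    return body

A terms aggregation yields a ranked list of distinct values, which suits type-ahead on string and boolean fields, while a stats aggregation returns count, min, max, avg, and sum, which is why integer, float, and date fields take the other branch.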