
PEP8 updates for ES query runner
adamlwgriffiths committed Nov 18, 2016
1 parent 2d7a497 commit 60a4c3d
Showing 1 changed file with 17 additions and 26 deletions.
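
For readers skimming the diff: the cleanup applies PEP 8's rule that the colon in a dict literal takes no space before it and one space after it (pycodestyle reports the old style as E203, whitespace before ':'). A minimal before/after sketch, using an assumed stand-in value rather than redash's real constant:

    TYPE_INTEGER = "integer"  # stand-in for redash's type constant, assumed for illustration

    old_style = {"integer" : TYPE_INTEGER}  # flagged by pycodestyle as E203 (whitespace before ':')
    new_style = {"integer": TYPE_INTEGER}   # PEP 8 style: no space before the colon, one after

    assert old_style == new_style  # the change is purely cosmetic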
43 changes: 17 additions & 26 deletions redash/query_runner/elasticsearch.py
@@ -17,27 +17,27 @@
 logger = logging.getLogger(__name__)

 ELASTICSEARCH_TYPES_MAPPING = {
-    "integer" : TYPE_INTEGER,
-    "long" : TYPE_INTEGER,
-    "float" : TYPE_FLOAT,
-    "double" : TYPE_FLOAT,
-    "boolean" : TYPE_BOOLEAN,
-    "string" : TYPE_STRING,
-    "date" : TYPE_DATE,
-    "object" : TYPE_STRING,
+    "integer": TYPE_INTEGER,
+    "long": TYPE_INTEGER,
+    "float": TYPE_FLOAT,
+    "double": TYPE_FLOAT,
+    "boolean": TYPE_BOOLEAN,
+    "string": TYPE_STRING,
+    "date": TYPE_DATE,
+    "object": TYPE_STRING,
     # "geo_point" TODO: Need to split to 2 fields somehow
 }

 ELASTICSEARCH_BUILTIN_FIELDS_MAPPING = {
-    "_id" : "Id",
-    "_score" : "Score"
+    "_id": "Id",
+    "_score": "Score"
 }

 PYTHON_TYPES_MAPPING = {
     str: TYPE_STRING,
     unicode: TYPE_STRING,
-    bool : TYPE_BOOLEAN,
-    int : TYPE_INTEGER,
+    bool: TYPE_BOOLEAN,
+    int: TYPE_INTEGER,
     long: TYPE_INTEGER,
     float: TYPE_FLOAT
 }
@@ -145,9 +145,7 @@ def collect_value(mappings, row, key, value, type):
             row[key] = value

         def collect_aggregations(mappings, rows, parent_key, data, row, result_columns, result_columns_index):
-
             if isinstance(data, dict):
-
                 for key, value in data.iteritems():
                     val = collect_aggregations(mappings, rows, parent_key if key == 'buckets' else key, value, row, result_columns, result_columns_index)
                     if val:
@@ -164,7 +162,6 @@ def collect_aggregations(mappings, rows, parent_key, data, row, result_columns,
                         return data[data_key]

             elif isinstance(data, list):
-
                 for value in data:
                     result_row = get_row(rows, row)
                     collect_aggregations(mappings, rows, parent_key, value, result_row, result_columns, result_columns_index)
@@ -176,23 +173,20 @@ def collect_aggregations(mappings, rows, parent_key, data, row, result_columns,

             return None

-        result_columns_index = {c["name"] : c for c in result_columns}
+        result_columns_index = {c["name"]: c for c in result_columns}

         result_fields_index = {}
         if result_fields:
             for r in result_fields:
                 result_fields_index[r] = None

         if 'error' in raw_result:
-
             error = raw_result['error']
             if len(error) > 10240:
                 error = error[:10240] + '... continues'

             raise Exception(error)
-
         elif 'aggregations' in raw_result:
-
             if result_fields:
                 for field in result_fields:
                     add_column_if_needed(mappings, field, field, result_columns, result_columns_index)
@@ -202,9 +196,7 @@ def collect_aggregations(mappings, rows, parent_key, data, row, result_columns,

logger.debug("result_rows %s", str(result_rows))
logger.debug("result_columns %s", str(result_columns))

elif 'hits' in raw_result and 'hits' in raw_result['hits']:

if result_fields:
for field in result_fields:
add_column_if_needed(mappings, field, field, result_columns, result_columns_index)
@@ -224,7 +216,6 @@ def collect_aggregations(mappings, rows, parent_key, data, row, result_columns,

                 result_rows.append(row)
         else:
-
             raise Exception("Redash failed to parse the results it got from ElasticSearch.")

     def test_connection(self):
@@ -310,8 +301,8 @@ def run_query(self, query, user):
raise Exception("Advanced queries are not supported")

json_data = json.dumps({
"columns" : result_columns,
"rows" : result_rows
"columns": result_columns,
"rows": result_rows
})
except KeyboardInterrupt:
error = "Query cancelled by user."
@@ -365,8 +356,8 @@ def run_query(self, query, user):
                 self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)

                 json_data = json.dumps({
-                    "columns" : result_columns,
-                    "rows" : result_rows
+                    "columns": result_columns,
+                    "rows": result_rows
                 })
         except KeyboardInterrupt:
             error = "Query cancelled by user."
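
A sketch of how a cleanup like this can be verified, assuming the pycodestyle package is installed (neither the tool invocation nor this snippet is part of the commit):

    import pycodestyle  # pip install pycodestyle

    # Select only E203 (whitespace before ':'), the rule the removed lines violate.
    style = pycodestyle.StyleGuide(select=['E203'])
    report = style.check_files(['redash/query_runner/elasticsearch.py'])
    print(report.total_errors)  # expect 0 after this commit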
