Skip to content

Commit

Permalink
more info in result
Browse files — browse the repository at this point in the history
  • Loading branch information
bohanjason committed May 25, 2020
1 parent 3ff9698 commit 131645f
Showing 1 changed file with 15 additions and 7 deletions.
22 changes: 15 additions & 7 deletions server/website/website/tasks/async_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,11 +272,17 @@ def preprocessing(result_id, algorithm):
if not has_pipeline_data and session.tuning_session == 'tuning_session':
LOG.info("%s: Background tasks haven't ran for this workload yet, "
"picking data with lhs.", task_name)
target_data['debug'] = ("Background tasks haven't ran for this workload yet. "
"If this keeps happening, please make sure Celery periodic "
"tasks are running on the server.")
if results_cnt == 0 and session.tuning_session == 'tuning_session':
LOG.info("%s: Not enough data in this session, picking data with lhs.", task_name)
target_data['debug'] = "Not enough data in this session, picking data with lhs."
if skip_ddpg:
LOG.info("%s: The most recent result cannot be used by DDPG, picking data with lhs.",
task_name)
target_data['debug'] = ("The most recent result cannot be used by DDPG,"
"picking data with lhs.")

all_samples = JSONUtil.loads(session.lhs_samples)
if len(all_samples) == 0:
Expand Down Expand Up @@ -575,13 +581,14 @@ def check_early_return(target_data, algorithm):
newest_result = Result.objects.get(pk=result_id)
if target_data.get('status', 'good') != 'good': # No status or status is not 'good'
if target_data['status'] == 'random':
info = 'The config is generated by Random'
info = 'The config is generated by Random.'
elif target_data['status'] == 'lhs':
info = 'The config is generated by LHS'
info = 'The config is generated by LHS.'
elif target_data['status'] == 'range_test':
info = 'Searching for valid knob ranges'
info = 'Searching for valid knob ranges.'
else:
info = 'Unknown'
info = 'Unknown.'
info += ' ' + target_data.get('debug', '')
target_data_res = create_and_save_recommendation(
recommended_knobs=target_data['config_recommend'], result=newest_result,
status=target_data['status'], info=info, pipeline_run=None)
Expand Down Expand Up @@ -877,8 +884,9 @@ def configuration_recommendation(recommendation_input):
break

res = None

info_msg = 'INFO: training data size is {}. '.format(X_scaled.shape[0])
if algorithm == AlgorithmType.DNN:
info_msg += 'Recommended by DNN.'
# neural network model
model_nn = NeuralNet(n_input=X_samples.shape[1],
batch_size=X_samples.shape[0],
Expand All @@ -897,6 +905,7 @@ def configuration_recommendation(recommendation_input):
session.save()

elif algorithm == AlgorithmType.GPR:
info_msg += 'Recommended by GPR.'
# default gpr model
if params['GPR_USE_GPFLOW']:
LOG.debug("%s: Running GPR with GPFLOW.", task_name)
Expand Down Expand Up @@ -957,8 +966,7 @@ def configuration_recommendation(recommendation_input):

conf_map_res = create_and_save_recommendation(
recommended_knobs=conf_map, result=newest_result,
status='good', info='INFO: training data size is {}'.format(X_scaled.shape[0]),
pipeline_run=target_data['pipeline_run'])
status='good', info=info_msg, pipeline_run=target_data['pipeline_run'])

exec_time = save_execution_time(start_ts, "configuration_recommendation", newest_result)
LOG.debug("\n%s: Result = %s\n", task_name, _task_result_tostring(conf_map_res))
Expand Down

0 comments on commit 131645f

Please sign in to comment.