Fix hard sigmoid, fix compile warning for models with multiple outputs
majianjia committed Sep 10, 2020
1 parent 85f7b90 commit 723ac28
Showing 3 changed files with 8 additions and 8 deletions.
4 changes: 2 additions & 2 deletions scripts/nnom.py
@@ -937,13 +937,13 @@ def gen_weight_tensor(w, per_axis):
 if (cfg['activation'] == 'relu'):
     fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n' % (id, LI[inp][0]))
 elif (cfg['activation'] == 'tanh'):
-    fp.write('\tlayer[%s] = model.active(act_tanh(%s_OUTPUT_DEC), layer[%s]);\n' % (
+    fp.write('\tlayer[%s] = model.active(act_hard_tanh(%s_OUTPUT_DEC), layer[%s]);\n' % (
         id, inp.upper(), LI[inp][0]))
 elif (cfg['activation'] == 'sigmoid'):
     fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_DEC), layer[%s]);\n' % (
         id, inp.upper(), LI[inp][0]))
 elif (cfg['activation'] == 'hard_sigmoid'):
-    fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_DEC), layer[%s]);\n' % (
+    fp.write('\tlayer[%s] = model.active(act_hard_sigmoid(%s_OUTPUT_DEC), layer[%s]);\n' % (
         id, inp.upper(), LI[inp][0]))
 elif (cfg['activation'] == 'softmax'):
     fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n' % (id, LI[inp][0]))
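Note on the scripts/nnom.py change: the generator now maps the Keras 'tanh' activation to act_hard_tanh() and 'hard_sigmoid' to act_hard_sigmoid(), instead of the plain act_tanh()/act_sigmoid() kernels. For reference, a minimal float-only sketch (not part of this commit) comparing the logistic sigmoid with the piecewise-linear hard sigmoid y = clip(0.2*x + 0.5, 0, 1) that the Q7 kernel below approximates:

#include <math.h>
#include <stdio.h>

/* Standalone reference sketch: the logistic sigmoid versus the
 * piecewise-linear hard sigmoid y = clip(0.2*x + 0.5, 0, 1) that
 * act_hard_sigmoid()/local_hard_sigmoid_q7() approximate in Q7. The curves
 * differ, which is why emitting act_sigmoid() for a 'hard_sigmoid'
 * activation mismatched the trained model. */
static float sigmoid_ref(float x) { return 1.0f / (1.0f + expf(-x)); }

static float hard_sigmoid_ref(float x)
{
    float y = 0.2f * x + 0.5f;
    return y < 0.0f ? 0.0f : (y > 1.0f ? 1.0f : y);
}

int main(void)
{
    for (int i = -3; i <= 3; i++)
        printf("x=%+d  sigmoid=%.4f  hard_sigmoid=%.4f\n",
               i, sigmoid_ref((float)i), hard_sigmoid_ref((float)i));
    return 0; /* compile with: cc demo.c -lm */
}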
6 changes: 3 additions & 3 deletions src/backends/nnom_local.c
@@ -1557,7 +1557,7 @@ void local_softmax_q7(const q7_t *vec_in, const uint32_t dim_vec, q7_t *p_out)
 // otherwise y = 0.2 * x + 0.5 (y=0.20315 * x + 0.5)
 void local_hard_sigmoid_q7(q7_t *data, uint32_t size, int16_t dec_bit)
 {
-    int16_t limit = 2.5f * (1<<dec_bit)-1;
+    int16_t limit = 2.5f * (1 << dec_bit)-1;
     int16_t offset = 64; // 0.5 * 128
     int16_t mult = 26; // 0.2 * 128

@@ -1570,7 +1570,7 @@ void local_hard_sigmoid_q7(q7_t *data, uint32_t size, int16_t dec_bit)
             data[i] = 127;
         else
         {
-            data[i] = ((int16_t)data[i] * mult >> dec_bit) + offset;
+            data[i] = ((int16_t)(data[i] * mult) >> dec_bit) + offset;
         }
     }
 }
@@ -1587,7 +1587,7 @@ void local_hard_tanh_q7(q7_t *data, uint32_t size, int16_t dec_bit)
     if(dec_bit == 7)
         return;

-    // int bit > 0
+    // int bit < 0
     if(int_bit < 0)
         for(int i=0; i<size; i++)
         {
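Note on the src/backends/nnom_local.c change: local_hard_sigmoid_q7() evaluates y = 0.2*x + 0.5 in fixed point, with the input in Q(dec_bit) format and the output in Q0.7 (mult = 26 ≈ 0.2*128, offset = 64 = 0.5*128, saturation beyond |x| ≥ 2.5). A small self-contained sketch of that arithmetic; the wrapper name and test value are chosen here for illustration only:

#include <stdint.h>
#include <stdio.h>

/* Illustration of the fixed-point hard sigmoid used in local_hard_sigmoid_q7():
 * y ≈ 0.203125 * x + 0.5, input x in Q(dec_bit), output in Q0.7.
 * Constants mirror the diff; the wrapper itself is only a sketch. */
static int8_t hard_sigmoid_q7_sketch(int8_t x, int16_t dec_bit)
{
    const int16_t limit  = (int16_t)(2.5f * (1 << dec_bit)) - 1; /* saturate beyond |x| >= 2.5 */
    const int16_t offset = 64;                                   /* 0.5 * 128 */
    const int16_t mult   = 26;                                   /* ~0.2 * 128 */

    if (x <= -limit)
        return 0;
    if (x >= limit)
        return 127;
    return (int8_t)(((int16_t)(x * mult) >> dec_bit) + offset);
}

int main(void)
{
    /* x = 1.0 in Q2.5 (dec_bit = 5) is 32; expected y ≈ 0.703 * 128 ≈ 90 */
    printf("%d\n", hard_sigmoid_q7_sketch(32, 5));
    return 0;
}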
6 changes: 3 additions & 3 deletions src/core/nnom.c
@@ -931,9 +931,9 @@ nnom_status_t model_compile(nnom_model_t *m, nnom_layer_t *input, nnom_layer_t *
NNOM_LOG("-------------------------------------------------------------------------------------------------\n");

// if model's tail is not the last layer which built by user.
if (output != layer_shortcut_find_last(input))
NNOM_LOG("WARNING: model returned at #%d %s layer, but this layer is not the end of shortcut list \n",
find_index(m->head, output), default_layer_names[output->type]);
if (output->type != NNOM_OUTPUT)
NNOM_LOG("WARNING: the last layer '%s' is not the Output Layer, please check carefully.\n",
default_layer_names[output->type]);

// get the total (aligned) memory requirement
buf_size = mem_analysis_result(m);
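Note on the src/core/nnom.c change: model_compile() previously warned whenever the layer passed as the model output was not the last layer of the shortcut list, which can misfire for models with several user-built outputs; it now warns only when that layer's type is not NNOM_OUTPUT. A toy, self-contained illustration of the two conditions, using stand-in types rather than the real nnom structs:

#include <stdio.h>

/* Toy model of the relaxed check: with two output branches, the one handed
 * to model_compile() is not necessarily the last entry of the shortcut list,
 * so the old condition could warn on a valid model; the new condition only
 * asks whether that layer is an Output layer. */
typedef enum { TOY_DENSE, TOY_OUTPUT } toy_layer_type_t;

typedef struct toy_layer {
    toy_layer_type_t type;
    struct toy_layer *shortcut_next; /* mimics the shortcut-list linkage */
} toy_layer_t;

int main(void)
{
    toy_layer_t out_b = { TOY_OUTPUT, NULL };
    toy_layer_t out_a = { TOY_OUTPUT, &out_b };   /* two user-built outputs */
    toy_layer_t *compiled_output = &out_a;        /* the one passed to model_compile() */

    /* old condition: warn if the compiled output is not the last of the shortcut list */
    toy_layer_t *last = compiled_output;
    while (last->shortcut_next) last = last->shortcut_next;
    printf("old check warns: %s\n", compiled_output != last ? "yes (false alarm)" : "no");

    /* new condition: warn only if the compiled output is not an Output layer */
    printf("new check warns: %s\n", compiled_output->type != TOY_OUTPUT ? "yes" : "no");
    return 0;
}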
