make sure all networks have a broadcastable dimension, fix for certain versions of theano

EliasVansteenkiste committed Apr 10, 2017
1 parent d4913b4 commit c71ee09
Showing 17 changed files with 19 additions and 17 deletions.
2 changes: 2 additions & 0 deletions configs_class_dsb/dsb_a_eliasq10_mal2_s5_p8a1_all.py
@@ -75,6 +75,8 @@ def candidates_prep_function(all_candidates, n_selection=None):

return selected_candidates

batch_size = 1

train_valid_ids = utils.load_pkl(pathfinder.VALIDATION_SPLIT_PATH)
train_pids, valid_pids, test_pids, stage2_pids = train_valid_ids['training'], train_valid_ids['validation'], train_valid_ids['test'], train_valid_ids['test_stage2']
print 'n train', len(train_pids)
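
The hunk above defines batch_size = 1 for this config. For context, Theano treats a tensor dimension as broadcastable only when its size is known to be 1 at graph-construction time; a dimension declared as None stays non-broadcastable, and some Theano versions then refuse to broadcast over it at run time, which is the version dependence the commit title mentions. A minimal, self-contained illustration of the distinction (not repo code; it assumes only a working numpy/Theano install):

import numpy as np
import theano
import theano.tensor as T

# Second dimension declared with known size 1 -> marked broadcastable.
t_bcast = T.TensorType(theano.config.floatX, (False, True))('t_bcast')
# Plain matrix: both dimensions unknown -> nothing is broadcastable.
t_plain = T.matrix('t_plain')
x = T.matrix('x')

print(t_bcast.broadcastable)  # (False, True)
print(t_plain.broadcastable)  # (False, False)

# Elementwise multiply broadcasts t_bcast along its size-1 dimension.
f = theano.function([x, t_bcast], x * t_bcast)
out = f(np.ones((2, 3), dtype=theano.config.floatX),
        np.ones((2, 1), dtype=theano.config.floatX))
print(out.shape)  # (2, 3)
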
2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq10_mal2_s5_p8a1_spl.py
@@ -247,7 +247,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

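
The remaining configs all touch the same line: the declared shape of the target InputLayer is switched between (batch_size,) and (None,). In Lasagne, when no input_var is passed, the declared shape also fixes the broadcast pattern of the Theano variable the layer creates for itself: a dimension declared with size 1 is marked broadcastable, while None leaves it non-broadcastable. A minimal sketch of that behaviour (not repo code; it assumes lasagne is imported as nn, as these configs appear to do, and a Lasagne version that derives the broadcast pattern from the declared shape):

import lasagne as nn

batch_size = 1

# Declared size 1 -> the auto-created input_var gets a broadcastable dimension.
l_fixed = nn.layers.InputLayer((batch_size,))
# Declared None -> size unknown at compile time, dimension not broadcastable.
l_free = nn.layers.InputLayer((None,))

print(l_fixed.input_var.broadcastable)  # (True,)
print(l_free.input_var.broadcastable)   # (False,)

With batch_size set to 1, as in the first config above, declaring the target as (batch_size,) therefore gives it a broadcastable dimension, which is what the commit title refers to.
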
2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq11_mal2_s5_p8a1_all.py
@@ -265,7 +265,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq11_mal2_s5_p8a1_spl.py
@@ -238,7 +238,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq14_mal2_s5_p8a1_all.py
@@ -273,7 +273,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq14_mal2_s5_p8a1_spl.py
@@ -247,7 +247,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq15_mal7_s5_p8a1_all.py
@@ -276,7 +276,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

4 changes: 2 additions & 2 deletions configs_class_dsb/dsb_a_eliasq15_mal7_s5_p8a1_spl.py
@@ -252,7 +252,7 @@ def load_pretrained_model(l_in):
b=nn.init.Constant(0))


metadata = utils.load_pkl(os.path.join("/home/eavsteen/dsb3/storage/metadata/dsb3/models/eavsteen/","r_fred_malignancy_7-20170404-163552.pkl"))
metadata = utils.load_pkl(os.path.join("/home/eavsteen/dsb3/storage/metadata/dsb3/models/","r_fred_malignancy_7-20170404-163552.pkl"))
nn.layers.set_all_param_values(l, metadata['param_values'])

return l
@@ -261,7 +261,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq1_mal2_s5_p8a1_spl.py
@@ -241,7 +241,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq3_mal2_s5_p8a1_all.py
@@ -262,7 +262,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq3_mal2_s5_p8a1_spl.py
@@ -236,7 +236,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq4_mal2_s5_p8a1_all.py
@@ -272,7 +272,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq4_mal2_s5_p8a1_spl.py
@@ -247,7 +247,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq5_mal2_s5_p8a1_all.py
@@ -274,7 +274,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq5_mal2_s5_p8a1_spl.py
@@ -247,7 +247,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq6_mal2_s5_p8a1_all.py
@@ -264,7 +264,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

2 changes: 1 addition & 1 deletion configs_class_dsb/dsb_a_eliasq6_mal2_s5_p8a1_spl.py
@@ -237,7 +237,7 @@ def load_pretrained_model(l_in):
def build_model():
l_in = nn.layers.InputLayer((None, n_candidates_per_patient,) + p_transform['patch_size'])
l_in_rshp = nn.layers.ReshapeLayer(l_in, (-1, 1,) + p_transform['patch_size'])
l_target = nn.layers.InputLayer((batch_size,))
l_target = nn.layers.InputLayer((None,))

l = load_pretrained_model(l_in_rshp)

