Merge pull request fastai#576 from sck/master

Fix typo: classifer -> classifier.  Also keep around an alias for get_…

jph00 authored Jun 25, 2018
2 parents ea1dd8f + a81b4bb, commit d5120f6

Showing 4 changed files with 6 additions and 5 deletions.

courses/dl2/imdb_scripts/train_clas.py (2 changes: 1 addition & 1 deletion)

@@ -70,7 +70,7 @@ def train_clas(dir_path, cuda_id, lm_id='', clas_id=None, bs=64, cl=1, backwards
     #dps = np.array([0.65,0.48,0.039,0.335,0.34])*dropmult
     #dps = np.array([0.6,0.5,0.04,0.3,0.4])*dropmult

-    m = get_rnn_classifer(bptt, 20*70, c, vs, emb_sz=em_sz, n_hid=nh, n_layers=nl, pad_token=1,
+    m = get_rnn_classifier(bptt, 20*70, c, vs, emb_sz=em_sz, n_hid=nh, n_layers=nl, pad_token=1,
           layers=[em_sz*3, 50, c], drops=[dps[4], 0.1],
           dropouti=dps[0], wdrop=dps[1], dropoute=dps[2], dropouth=dps[3])
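
For context, dps in the call above is a five-element array of dropout rates scaled by a dropmult factor: dps[0] through dps[3] feed dropouti, wdrop, dropoute and dropouth on the encoder, and dps[4] is the dropout for the classifier head. A minimal sketch in the spirit of the commented-out lines (the exact values are an assumption, not part of this commit):

import numpy as np

dropmult = 1.0  # hypothetical global dropout multiplier
# Assumed schedule: dps[0]=dropouti, dps[1]=wdrop, dps[2]=dropoute, dps[3]=dropouth, dps[4]=head dropout
dps = np.array([0.4, 0.5, 0.05, 0.3, 0.4]) * dropmult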

fastai/lm_rnn.py (1 change: 1 addition & 0 deletions)

@@ -243,3 +243,4 @@ def get_rnn_classifier(bptt, max_seq, n_class, n_tok, emb_sz, n_hid, n_layers, p
         dropouth=dropouth, dropouti=dropouti, dropoute=dropoute, wdrop=wdrop, qrnn=qrnn)
     return SequentialRNN(rnn_enc, PoolingLinearClassifier(layers, drops))

+get_rnn_classifer=get_rnn_classifier
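
The added last line keeps the old misspelled name around as an alias, so code that still imports or calls get_rnn_classifer keeps working. A minimal sketch of the effect (assuming the fastai 0.7-era module layout):

from fastai.lm_rnn import get_rnn_classifier, get_rnn_classifer

# Both names now refer to the same function object, so old call sites are unaffected.
assert get_rnn_classifer is get_rnn_classifier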

fastai/models/nasnet.py (6 changes: 3 additions & 3 deletions)

@@ -486,9 +486,9 @@ def forward(self, x, x_prev):

 class NASNetALarge(nn.Module):

-    def __init__(self, use_classifer=False, num_classes=1001):
+    def __init__(self, use_classifier=False, num_classes=1001):
         super(NASNetALarge, self).__init__()
-        self.use_classifer,self.num_classes = use_classifer,num_classes
+        self.use_classifier,self.num_classes = use_classifier,num_classes

         self.conv0 = nn.Sequential()
         self.conv0.add_module('conv', nn.Conv2d(in_channels=3, out_channels=96, kernel_size=3, padding=0, stride=2,

@@ -586,7 +586,7 @@ def classifier(self, x):

     def forward(self, x):
         x = self.features(x)
-        if self.use_classifer: x = self.classifier(x)
+        if self.use_classifier: x = self.classifier(x)
         return x
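
With the renamed attribute, NASNetALarge returns backbone features by default and only applies its classifier head when asked. A rough usage sketch (the 331x331 input resolution is an assumption about NASNet-A Large, not something stated in this commit):

import torch
from fastai.models.nasnet import NASNetALarge

feature_extractor = NASNetALarge()                                  # forward() returns self.features(x)
full_model = NASNetALarge(use_classifier=True, num_classes=1001)    # forward() also applies self.classifier(x)

x = torch.randn(2, 3, 331, 331)   # assumed NASNet-A Large input size
logits = full_model(x)            # shape (2, 1001) when the classifier head is enabled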


fastai/nlp.py (2 changes: 1 addition & 1 deletion)

@@ -360,7 +360,7 @@ def to_model(self, m, opt_fn):
         return RNN_Learner(self, model, opt_fn=opt_fn)

     def get_model(self, opt_fn, max_sl, bptt, emb_sz, n_hid, n_layers, dropout, **kwargs):
-        m = get_rnn_classifer(bptt, max_sl, self.c, self.nt,
+        m = get_rnn_classifier(bptt, max_sl, self.c, self.nt,
             layers=[emb_sz*3, self.c], drops=[dropout],
             emb_sz=emb_sz, n_hid=n_hid, n_layers=n_layers, pad_token=self.pad_idx, **kwargs)
         return self.to_model(m, opt_fn)
