Model initialization error #20

Open
pranavpawar3 opened this issue May 17, 2019 · 0 comments

Hi, I am running the model on the MovieLens dataset and am facing an issue with model training. When I start training on the dataset, it raises the following error:

```
InvalidType:
Invalid operation is performed in: LinearFunction (Forward)

Expect: x.shape[1] == W.shape[1]
Actual: 5 != 950198
```

The complete output of `model.fit(X, y)` is as follows:

```
load dataset
The number of data, train: 950198 validate: 50011
prepare initialized model!

0%| | 0/5000 [00:00<?, ?it/s]


InvalidType Traceback (most recent call last)
in
----> 1 model.fit(X,y)

C:/Users/ppawar/Desktop/Genesys_PDP_code/ml-1m/learning2rank/rank\RankNet.py in fit(self, fit_X, fit_y, batchsize, n_iter, n_units1, n_units2, tv_ratio, optimizerAlgorithm, savefigName, savemodelName)
131 self.initializeModel(Model, train_X, n_units1, n_units2, optimizerAlgorithm)
132
--> 133 self.trainModel(train_X, train_y, validate_X, validate_y, n_iter)
134
135 plot_result.acc(self.train_loss, self.test_loss, savename=savefigName)

C:/Users/ppawar/Desktop/Genesys_PDP_code/ml-1m/learning2rank/rank\RankNet.py in trainModel(self, x_train, y_train, x_test, y_test, n_iter)
111 y_j = chainer.Variable(y_train[j])
112
--> 113 self.optimizer.update(self.model, x_i, x_j, y_i, y_j)
114
115 if (step + 1) % loss_step == 0:

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\optimizer.py in update(self, lossfun, *args, **kwds)
678 if lossfun is not None:
679 use_cleargrads = getattr(self, '_use_cleargrads', True)
--> 680 loss = lossfun(*args, **kwds)
681 if use_cleargrads:
682 self.target.cleargrads()

C:/Users/ppawar/Desktop/Genesys_PDP_code/ml-1m/learning2rank/rank\RankNet.py in __call__(self, x_i, x_j, t_i, t_j)
     35         )
     36     def __call__(self, x_i, x_j, t_i, t_j):
---> 37 s_i = self.l3(F.relu(self.l2(F.relu(self.l1(x_i)))))
38 s_j = self.l3(F.relu(self.l2(F.relu(self.l1(x_j)))))
39 s_diff = s_i - s_j

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\link.py in __call__(self, *args, **kwargs)
240 if forward is None:
241 forward = self.forward
--> 242 out = forward(*args, **kwargs)
243
244 # Call forward_postprocess hook

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\links\connection\linear.py in forward(self, x, n_batch_axes)
136 in_size = functools.reduce(operator.mul, x.shape[1:], 1)
137 self._initialize_params(in_size)
--> 138 return linear.linear(x, self.W, self.b, n_batch_axes=n_batch_axes)

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\functions\connection\linear.py in linear(x, W, b, n_batch_axes)
286 args = x, W, b
287
--> 288 y, = LinearFunction().apply(args)
289 if n_batch_axes > 1:
290 y = y.reshape(batch_shape + (-1,))

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\function_node.py in apply(self, inputs)
243
244 if configuration.config.type_check:
--> 245 self._check_data_type_forward(in_data)
246
247 hooks = chainer.get_function_hooks()

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\function_node.py in _check_data_type_forward(self, in_data)
328 in_type = type_check.get_types(in_data, 'in_types', False)
329 with type_check.get_function_check_context(self):
--> 330 self.check_type_forward(in_type)
331
332 def check_type_forward(self, in_types):

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\functions\connection\linear.py in check_type_forward(self, in_types)
25 x_type.ndim == 2,
26 w_type.ndim == 2,
---> 27 x_type.shape[1] == w_type.shape[1],
28 )
29 if type_check.eval(n_in) == 3:

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\utils\type_check.py in expect(*bool_exprs)
544 for expr in bool_exprs:
545 assert isinstance(expr, Testable)
--> 546 expr.expect()
547
548

~\AppData\Local\Continuum\anaconda3\envs\tensorflow_gpu_keras\lib\site-packages\chainer\utils\type_check.py in expect(self)
481 raise InvalidType(
482 '{0} {1} {2}'.format(self.lhs, self.exp, self.rhs),
--> 483 '{0} {1} {2}'.format(left, self.inv, right))
484
485

InvalidType:
Invalid operation is performed in: LinearFunction (Forward)

Expect: x.shape[1] == W.shape[1]
Actual: 5 != 950198
```
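
For reference, this is roughly how I build `X` and `y` and start training. The feature construction below is a simplified stand-in for my actual preprocessing (like my real `X`, it produces 5 columns, which matches the `5` in the type check error), and the import and instantiation follow the file layout shown in the traceback, so treat it as a sketch rather than a verbatim copy of my script:

```python
import numpy as np
import pandas as pd
from learning2rank.rank import RankNet  # module path as in the traceback; adjust if your layout differs

# MovieLens 1M ratings: UserID::MovieID::Rating::Timestamp
ratings = pd.read_csv('ml-1m/ratings.dat', sep='::', engine='python',
                      names=['user', 'movie', 'rating', 'timestamp'])

# Placeholder feature construction (5 float32 columns standing in for my real features)
feats = ratings[['user', 'movie', 'timestamp']].astype(np.float32)
feats['user_mean'] = ratings.groupby('user')['rating'].transform('mean')
feats['movie_mean'] = ratings.groupby('movie')['rating'].transform('mean')

X = feats.to_numpy(dtype=np.float32)              # shape: (1000209, 5)
y = ratings['rating'].to_numpy(dtype=np.float32)  # shape: (1000209,)

model = RankNet.RankNet()  # class defined in rank/RankNet.py
model.fit(X, y)            # fails with the InvalidType error shown above
```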
