Skip to content

Commit

Permalink
Update memnet.py
Browse files Browse the repository at this point in the history
Hello,

I made a small modification to the calculation of the locationed memory in this model. The previous implementation was not using the absolute distance between the context word and the aspect, as specified in the paper.

Hope you find this useful. If so, it will also require including this change in the input_colses dictionary of the train.py file:
'memnet': ['text_raw_without_aspect_indices', 'aspect_indices', 'text_left_with_aspect_indices']
  • Loading branch information
AlbertoPaz authored May 20, 2018
1 parent 52502d4 commit a4c208d
Showing 1 changed file with 13 additions and 4 deletions.
17 changes: 13 additions & 4 deletions models/memnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,19 @@


class MemNet(nn.Module):
def locationed_memory(self, memory, memory_len):

def locationed_memory(self, memory, memory_len, left_len, aspect_len):
# here we just simply calculate the location vector in Model2's manner
'''
Updated to calculate location as the absolute diference between context word and aspect
'''
for i in range(memory.size(0)):
for idx in range(memory_len[i]):
memory[i][idx] *= (1-float(idx)/int(memory_len[i]))
aspect_start = left_len[i] - aspect_len[i]
if idx < aspect_start: l = aspect_start.item() - idx # l = absolute distance to the aspect
else: l = idx +1 - aspect_start.item()
memory[i][idx] *= (1-float(l)/int(memory_len[i]))

return memory

def __init__(self, embedding_matrix, opt):
Expand All @@ -28,14 +36,15 @@ def __init__(self, embedding_matrix, opt):
self.dense = nn.Linear(opt.embed_dim, opt.polarities_dim)

def forward(self, inputs):
text_raw_without_aspect_indices, aspect_indices = inputs[0], inputs[1]
text_raw_without_aspect_indices, aspect_indices, left_with_aspect_indices = inputs[0], inputs[1], inputs[2]
left_len = torch.sum(left_with_aspect_indices != 0, dim = -1)
memory_len = torch.sum(text_raw_without_aspect_indices != 0, dim=-1)
aspect_len = torch.sum(aspect_indices != 0, dim=-1)
nonzeros_aspect = torch.tensor(aspect_len, dtype=torch.float).to(self.opt.device)

memory = self.embed(text_raw_without_aspect_indices)
memory = self.squeeze_embedding(memory, memory_len)
# memory = self.locationed_memory(memory, memory_len)
# memory = self.locationed_memory(memory, memory_len, left_len, aspect_len)
aspect = self.embed(aspect_indices)
aspect = torch.sum(aspect, dim=1)
aspect = torch.div(aspect, nonzeros_aspect.view(nonzeros_aspect.size(0), 1))
Expand Down

0 comments on commit a4c208d

Please sign in to comment.