-
Notifications
You must be signed in to change notification settings - Fork 28
/
language_only.py
executable file
·49 lines (40 loc) · 1.62 KB
/
language_only.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import torch
from mlp import MLP
import pdb
class LanguageEncoder(torch.nn.Module):
    """Encode a tokenized natural-language command into per-block logits.

    Pipeline: embed each command with ``embedder``, encode the embedded
    batch with ``encoder``, then project the resulting sentence encoding
    to block logits through a small MLP head.
    """
    def __init__(self,
                 embedder,
                 encoder,
                 device,
                 hidden_dim=64,
                 output_dim=21,
                 num_layers=3,
                 dropout=0.20):
        """
        :param embedder: callable mapping one token sequence to an embedding
            tensor of shape (seq_len, emb_dim) — assumed, TODO confirm against
            the embedder implementation
        :param encoder: sequence encoder; must expose ``output_size`` and,
            when called with (embedded_batch, lengths), return a dict
            containing a "sentence_encoding" entry
        :param device: torch device the length tensor is moved to (also
            attached to the embedder/encoder, mirroring the original code)
        :param hidden_dim: hidden width of the MLP head (default keeps the
            previously hard-coded 64)
        :param output_dim: number of output logits (default keeps the
            previously hard-coded 21)
        :param num_layers: depth of the MLP head (default 3, as before)
        :param dropout: dropout probability in the MLP head (default 0.20)
        """
        super().__init__()
        self.lang_embedder = embedder
        self.lang_embedder.device = device
        self.lang_encoder = encoder
        self.lang_encoder.device = device
        # MLP head mapping the encoder's sentence encoding to block logits.
        self.mlp = MLP(input_dim = encoder.output_size,
                       hidden_dim = hidden_dim,
                       output_dim = output_dim,
                       num_layers = num_layers,
                       dropout = dropout)
        self.compute_block_dist = True
        self.device = device

    def forward(self, data_batch):
        """Run the language pipeline on one batch.

        :param data_batch: dict with "command" (list of token sequences)
            and "length" (list of per-sequence lengths)
        :return: dict with "pred_block_logits" — logits of shape
            (batch, output_dim)
        """
        lang_input = data_batch["command"]
        lang_length = data_batch["length"]
        # Tensorize lengths and move them onto the model's device.
        lengths = torch.tensor(lang_length).float()
        lengths = lengths.to(self.device)
        # Embed each command and stack into a (batch, seq, emb) tensor;
        # equivalent to the original cat-of-unsqueezes over an index loop.
        lang_embedded = torch.stack([self.lang_embedder(seq) for seq in lang_input],
                                    dim=0)
        # Encode the embedded batch into a single sentence encoding.
        lang_output = self.lang_encoder(lang_embedded, lengths)
        sent_encoding = lang_output["sentence_encoding"]
        # Project the sentence encoding to per-block logits.
        logits = self.mlp(sent_encoding)
        return {"pred_block_logits": logits}