# GenBaB/cifar/models/lstm.py
import torch
import torch.nn as nn
class LSTM(nn.Module):
    """Patch-based LSTM classifier: the image is split into non-overlapping
    patches, each patch is linearly embedded, and the resulting patch
    sequence is processed by a single forward LSTM cell."""

    def __init__(self, in_channels=3, width=100, patch_size=8, num_classes=10):
        super().__init__()
        self.patch_size = patch_size
        self.width = width
        self.num_classes = num_classes
        # A conv with kernel_size == stride == patch_size acts as a
        # per-patch linear embedding (ViT-style patch projection).
        self.projection = nn.Conv2d(
            in_channels, width, kernel_size=patch_size, stride=patch_size)
        self.cell_f = nn.LSTMCell(width, width)
        self.fc = nn.Linear(width, num_classes)

    def forward(self, x):
        # (N, C, H, W) -> (N, width, H/P, W/P): embed each patch.
        embed = self.projection(x)
        # Flatten the patch grid into a sequence: (N, num_patches, width).
        embed = torch.flatten(embed, 2).permute(0, 2, 1)
        # Zero-initialized hidden and cell states.
        h_f = torch.zeros(x.shape[0], self.width, device=x.device)
        c_f = h_f.clone()
        # Unroll the LSTM cell over the patch sequence.
        for i in range(embed.shape[1]):
            h_f, c_f = self.cell_f(embed[:, i], (h_f, c_f))
        # Classify from the final hidden state.
        logits = self.fc(h_f)
        return logits


def LSTM_patch_16_32(in_ch=3, in_dim=32):
    # CIFAR-sized inputs only: 3 channels, 32x32 images, 16x16 patches.
    assert in_ch == 3 and in_dim == 32
    return LSTM(width=32, patch_size=16)


def LSTM_patch_16_64(in_ch=3, in_dim=32):
    assert in_ch == 3 and in_dim == 32
    return LSTM(width=64, patch_size=16)