import numpy as np
import torch
import torch.nn as nn

class GELUOp(torch.autograd.Function):
    """GELU as a custom autograd Function with an ONNX symbolic hook."""

    @staticmethod
    def symbolic(g, x):
        # During ONNX export, emit a node in the custom domain; the target
        # runtime must supply an implementation of 'custom::Gelu'.
        return g.op('custom::Gelu', x)

    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)  # keep the input for the backward pass
        return torch.nn.functional.gelu(x)

    @staticmethod
    def backward(ctx, grad_output):
        x, = ctx.saved_tensors
        # Exact GELU derivative: d/dx [x * Phi(x)] = Phi(x) + x * phi(x),
        # with Phi / phi the standard normal CDF / PDF.
        grad = 0.5 * (1 + torch.erf(x / np.sqrt(2))) + x * torch.exp(-0.5 * x ** 2) / np.sqrt(2 * np.pi)
        return grad_output * grad

class GELU(nn.Module):
    """nn.Module wrapper so GELUOp can be dropped into nn.Sequential."""

    def forward(self, x):
        return GELUOp.apply(x)

def gelu_fc(in_ch=3, in_dim=32, width=100, depth=4, omega=0.3, num_classes=10):
    # Fully connected classifier: Flatten -> (Linear, GELU) x depth -> Linear.
    # Note: `omega` is unused here, presumably kept so all builders share a signature.
    layers = [nn.Flatten(), nn.Linear(in_ch * in_dim ** 2, width), GELU()]
    for _ in range(depth - 1):
        layers.extend([nn.Linear(width, width), GELU()])
    layers.append(nn.Linear(width, num_classes))
    return nn.Sequential(*layers)


def gelu_4fc_100(in_ch=3, in_dim=32):
    return gelu_fc(in_ch, in_dim, width=100, depth=4)


def gelu_4fc_200(in_ch=3, in_dim=32):
    return gelu_fc(in_ch, in_dim, width=200, depth=4)


def gelu_4fc_500(in_ch=3, in_dim=32):
    return gelu_fc(in_ch, in_dim, width=500, depth=4)
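

# --- Illustrative usage sketch (not part of the original file) ---
# A minimal smoke test, assuming CIFAR-10-shaped inputs (3x32x32): build
# gelu_4fc_100, run a forward/backward pass through the custom GELUOp, and
# numerically check the hand-written backward in double precision.
if __name__ == "__main__":
    model = gelu_4fc_100()
    x = torch.randn(8, 3, 32, 32, requires_grad=True)
    logits = model(x)           # expected shape: (8, 10)
    logits.sum().backward()     # exercises GELUOp.backward
    print(logits.shape, x.grad.shape)

    # gradcheck compares the analytical gradient against finite differences;
    # double precision inputs are required for it to be meaningful.
    x64 = torch.randn(16, dtype=torch.double, requires_grad=True)
    print(torch.autograd.gradcheck(GELUOp.apply, (x64,)))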