from tinygrad import Tensor, nn


class Gen:
    def __init__(self, height=128, width=216, latent_dim=128):
        # spatial size of the feature map before the two stride-2 upsampling layers
        self.w = width // 4
        self.h = height // 4
        self.flat = 128 * self.h * self.w
        self.ld = latent_dim
        self.d1 = nn.Linear(latent_dim, self.flat)
        # each ConvTranspose2d doubles the spatial resolution (stride=2, output_padding=1)
        self.d2 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.d3 = nn.ConvTranspose2d(64, 1, kernel_size=3, stride=2, padding=1, output_padding=1)

    def __call__(self, noise: Tensor) -> Tensor:
        x = self.d1(noise).relu()
        # (batch, flat) -> (batch, 128, height//4, width//4)
        x = x.reshape(noise.shape[0], 128, self.h, self.w)
        x = self.d2(x).relu()
        x = self.d3(x)
        # tanh squashes the generated image into [-1, 1]
        return x.tanh()


class Check:
    def __init__(self, height=128, width=216):
        self.w = width // 4
        self.h = height // 4
        self.flat = 128 * self.h * self.w
        # two stride-2 convolutions halve the resolution twice: height//4 x width//4
        self.e1 = nn.Conv2d(1, 64, kernel_size=3, stride=2, padding=1)
        self.e2 = nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1)
        self.out = nn.Linear(self.flat, 1)

    def __call__(self, x: Tensor) -> Tensor:
        x = self.e1(x).relu()
        x = self.e2(x).relu()
        # flatten the feature maps to (batch, flat) for the linear head
        x = x.reshape(x.shape[0], -1)
        return self.out(x)  # .sigmoid() omitted: raw logits for a logits-based loss
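

# Usage sketch (not part of the original file): a quick shape check of the two
# models above. `gen`, `critic`, and `z` are hypothetical names; the loss line
# assumes tinygrad's Tensor.binary_crossentropy_logits, which matches the
# raw-logit output of Check.
if __name__ == "__main__":
    gen = Gen()
    critic = Check()
    z = Tensor.randn(4, 128)       # batch of 4 latent vectors
    fake = gen(z)                  # -> (4, 1, 128, 216), values in [-1, 1]
    logits = critic(fake)          # -> (4, 1) raw logits
    # label the fakes as 0; real images would get label 1
    loss = logits.binary_crossentropy_logits(Tensor.zeros(4, 1))
    print(fake.shape, logits.shape, loss.item())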