import torch
import torch.nn.functional as F
import torch.nn as nn
import numpy as np

# Board-crop size the network expects (height, width).
TARGET_SIZE = (25, 25)

# Run on the first CUDA device if one is available, otherwise fall back to CPU.
DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")


class Net(nn.Module):
    """Small CNN that classifies a 25x25 single-channel crop as stone vs. empty."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.conv1 = nn.Conv2d(1, 12, 5)   # 25x25 -> 21x21
        self.maxpool = nn.MaxPool2d(2, 2)  # 21x21 -> 10x10
        self.conv2 = nn.Conv2d(12, 16, 3)  # 10x10 -> 8x8
        self.conv3 = nn.Conv2d(16, 32, 3)  # 8x8 -> 6x6
        self.fc1 = nn.Linear(32 * 6 * 6, 120)
        self.fc2 = nn.Linear(120, 48)
        self.fc3 = nn.Linear(48, 2)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = self.maxpool(x)
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = torch.flatten(x, 1)            # (N, 32, 6, 6) -> (N, 1152)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = F.softmax(self.fc3(x), dim=1)  # class probabilities per crop
        return x


def classify_stone_empty(s: list[np.ndarray], model: nn.Module) -> torch.Tensor:
    """Classify a batch of HxWx3 uint8 crops; returns the predicted class index per crop."""
    s = torch.from_numpy(np.stack(s))      # (N, H, W, 3), uint8
    s = s[:, :, :, 0]                      # keep a single channel -> (N, H, W)
    s = s[:, None, :, :].float() / 255.0   # add channel dim and scale to [0, 1]
    s = s.to(DEVICE)
    return torch.argmax(model(s), dim=1)


def load_model(f: str) -> nn.Module:
    """Load trained weights from `f` and prepare the model for inference."""
    torch.autograd.set_grad_enabled(False)  # inference only; disables autograd globally
    model = Net()
    model.load_state_dict(torch.load(f, weights_only=True))
    model.eval()
    model.to(DEVICE)
    return model
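

# --- Usage sketch (illustrative only) --------------------------------------
# A minimal example of how the helpers above could be wired together.
# The weights path "stone_classifier.pt" and the synthetic crops are
# assumptions for demonstration; in practice the crops would come from an
# upstream board-detection step and the weights from a prior training run.
if __name__ == "__main__":
    # Fake batch of three TARGET_SIZE x 3-channel crops, as a detector might produce.
    crops = [
        np.random.randint(0, 256, size=(*TARGET_SIZE, 3), dtype=np.uint8)
        for _ in range(3)
    ]

    # With real weights: model = load_model("stone_classifier.pt")  # hypothetical path
    # Here an untrained Net is used so the sketch runs without a weights file.
    model = Net().to(DEVICE).eval()

    preds = classify_stone_empty(crops, model)
    print(preds)  # tensor of 0/1 class indices, one per crop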