import torch
import torch.nn as nn
import torchvision.transforms as transforms
from PIL import Image
import numpy as np
import cv2
# Simple emotion labels
EMOTIONS = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"]
# Lightweight emotion classifier (tiny custom MLP, not a real CNN)
class TinyEmotionNet(nn.Module):
    def __init__(self):
        super(TinyEmotionNet, self).__init__()
        self.model = nn.Sequential(
            nn.Flatten(),
            nn.Linear(48 * 48, 256),  # 48x48 grayscale input flattened to 2304 features
            nn.ReLU(),
            nn.Linear(256, 7),        # one output per emotion label
            nn.Softmax(dim=1)         # class probabilities
        )

    def forward(self, x):
        return self.model(x)
# Build the tiny demo model; seed the RNG so the random weights are actually fixed across runs
def load_emotion_model():
    torch.manual_seed(0)  # makes the "fixed random weights" demo behavior reproducible (untrained)
    model = TinyEmotionNet()
    model.eval()
    return model
emotion_model = load_emotion_model()
# Preprocessing: grayscale, resize to 48x48, scale pixel values to [0, 1]
transform = transforms.Compose([
    transforms.Grayscale(),
    transforms.Resize((48, 48)),
    transforms.ToTensor()
])
def predict_emotion(image: Image.Image):
    img = image.convert("RGB")          # normalize mode; Grayscale() reduces it to one channel
    img = transform(img).unsqueeze(0)   # add batch dimension -> (1, 1, 48, 48)
    with torch.no_grad():
        output = emotion_model(img)
    prob = torch.max(output).item()     # highest softmax probability
    idx = torch.argmax(output).item()   # index of the predicted emotion
    return EMOTIONS[idx], float(prob)
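
# --- Illustrative usage sketch (not part of the original file) ---
# Shows how predict_emotion() might be called; "face.jpg" is a hypothetical path,
# and the prediction is arbitrary because the demo model is untrained.
if __name__ == "__main__":
    sample = Image.open("face.jpg")  # hypothetical input image
    label, confidence = predict_emotion(sample)
    print(f"Predicted emotion: {label} ({confidence:.2f})")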