Commit eeddf6a6 authored by csdnstudent

Tue Oct 24 16:49:00 CST 2023 inscode

Parent d8c22349
run = "npm i && npm run dev"
language = "node"
[env]
PATH = "/root/${PROJECT_DIR}/.config/npm/node_global/bin:/root/${PROJECT_DIR}/node_modules/.bin:${PATH}"
XDG_CONFIG_HOME = "/root/.config"
npm_config_prefix = "/root/${PROJECT_DIR}/.config/npm/node_global"
[debugger]
program = "main.js"
@@ -33,3 +33,51 @@ if (process.env.npm_config_preview || rawArgv.includes('--preview')) {
} else {
run(`vue-cli-service build ${args}`)
}
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor


class CNN_Transformer(nn.Module):
    def __init__(self, num_classes):
        super(CNN_Transformer, self).__init__()
        # CNN layers (stride-1, padding-1 convolutions preserve spatial size)
        self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1)
        self.fc1 = nn.Linear(64 * 16 * 16, 128)  # expects 3 x 16 x 16 inputs
        # Transformer layers
        self.embedding = nn.Linear(128, 512)
        self.layer_norm = nn.LayerNorm(512)
        self.self_attn = nn.MultiheadAttention(512, 8, dropout=0.1)
        self.dropout1 = nn.Dropout(0.1)
        self.linear1 = nn.Linear(512, 256)
        self.dropout2 = nn.Dropout(0.5)
        self.linear2 = nn.Linear(256, 128)
        # Output layer
        self.out = nn.Linear(128, num_classes)

    def forward(self, x: Tensor) -> Tensor:
        # CNN feature extraction
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = x.view(-1, 64 * 16 * 16)
        x = F.relu(self.fc1(x))                 # (batch, 128)
        # Transformer encoding over a length-1 sequence:
        # nn.MultiheadAttention expects (seq_len, batch, embed_dim) by default
        x = self.embedding(x).unsqueeze(0)      # (1, batch, 512)
        x = self.layer_norm(x)
        x, _ = self.self_attn(x, x, x)
        x = self.dropout1(x)
        x = F.relu(self.linear1(x))
        x = self.dropout2(x)
        x = F.relu(self.linear2(x))             # (1, batch, 128)
        # Classification head
        x = self.out(x.squeeze(0))              # (batch, num_classes)
        return x
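A minimal usage sketch of the model above, assuming 3-channel 16 x 16 inputs (the size fc1 expects) and a hypothetical num_classes of 10; these values are illustrative, not taken from the commit:

# Smoke test for CNN_Transformer; batch size, input size, and num_classes
# are assumptions chosen to match the fc1 layer, not values from the commit.
model = CNN_Transformer(num_classes=10)
dummy = torch.randn(4, 3, 16, 16)   # batch of 4 RGB images, 16 x 16 each
logits = model(dummy)               # expected shape: (4, 10)
print(logits.shape)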