告密者的下场(1/2)
从前有座山,叫巴里赫德,他一个当一座山,十分的爽。
哎,想不到吧。
这个山里面有树林,有庙,有这个山里灵活的狗。山上有棵树,这棵树不叫高树,因为这个梗太老了。这棵树的形状有些奇特,大概就长这个样子。
import os

import torch
import torch.nn.functional as F
from torch import nn
from torch.utils.data import DataLoader, Dataset

import tensorboardX  # third-party: provides SummaryWriter for training logs

# Run on GPU when one is available, otherwise fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class VGGBaseSimpleS2(nn.Module):
    """Small VGG-style CNN for 1-channel 6x6 inputs, 2-class softmax output.

    Layer shape trace (spatial size, from the original inline comments):
    6x6 -> conv1 -> 6x6 -> pool(k2,s1) -> 5x5 -> conv2_1 -> 5x5
    -> pool(k2,s1) -> 4x4 -> conv2_2 -> 4x4 -> pool(k2,s2) -> 2x2
    -> flatten (24*2*2) -> fc -> 2 logits -> softmax.
    """

    def __init__(self):
        super(VGGBaseSimpleS2, self).__init__()
        # 6*6 input, padding=1 keeps spatial size.
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 12, kernel_size=3, stride=1, padding=1),
            # nn.BatchNorm2d(12),  # kept disabled as in the original
            nn.ReLU()
        )
        # 6*6 -> 5*5 (stride-1 pooling shrinks by 1)
        self.max_pooling1 = nn.MaxPool2d(kernel_size=2, stride=1)
        # 5*5
        self.conv2_1 = nn.Sequential(
            nn.Conv2d(12, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        # 5*5 -> 4*4
        self.max_pooling2_1 = nn.MaxPool2d(kernel_size=2, stride=1)
        # 4*4
        self.conv2_2 = nn.Sequential(
            nn.Conv2d(24, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        # 4*4 -> 2*2
        self.max_pooling2 = nn.MaxPool2d(kernel_size=2, stride=2)
        # 2*2 feature map, 24 channels -> 2 classes
        self.fc = nn.Linear(24 * 2 * 2, 2)

    def forward(self, x):
        """Map a (batch, 1, 6, 6) tensor to (batch, 2) class probabilities."""
        batchsize = x.size(0)
        out = self.conv1(x)
        out = self.max_pooling1(out)
        out = self.conv2_1(out)
        out = self.max_pooling2_1(out)
        out = self.conv2_2(out)
        # Required so the flatten below matches fc's 24*2*2 input size.
        out = self.max_pooling2(out)
        out = out.view(batchsize, -1)
        out = self.fc(out)
        # Probabilities, not logits — note this pairs poorly with
        # nn.CrossEntropyLoss (which expects raw logits); kept as original.
        out = F.softmax(out, dim=1)
        return out
class TrainingDataSet(Dataset):
    """Training split: each sample is the row's first 36 features as a 6x6 grid.

    NOTE(review): relies on module-level globals ``X_train`` / ``y_train``
    existing before instantiation and supporting ``[row, col]`` / ``[row]``
    indexing (e.g. numpy arrays) — confirm against the surrounding script.
    """

    def __init__(self):
        super(TrainingDataSet, self).__init__()
        self.data = X_train
        self.label = y_train

    def __getitem__(self, index):
        # First 36 columns of the row become one single-channel 6x6 "image".
        t = self.data[index, 0:36]
        t = torch.tensor(t).view(6, 6)
        return t, self.label[index]

    def __len__(self):
        return len(self.label)
class TestDataSet(Dataset):
    """Validation split: same 36-feature -> 6x6 reshaping as TrainingDataSet.

    NOTE(review): relies on module-level globals ``X_validate`` /
    ``y_validate`` existing before instantiation and supporting
    ``[row, col]`` / ``[row]`` indexing — confirm against the caller.
    """

    def __init__(self):
        super(TestDataSet, self).__init__()
        self.data = X_validate
        self.label = y_validate

    def __getitem__(self, index):
        # First 36 columns of the row become one single-channel 6x6 "image".
        t = self.data[index, 0:36]
        t = torch.tensor(t).view(6, 6)
        return t, self.label[index]

    def __len__(self):
        return len(self.label)
def cnn_classification():
batch_size = 256
trainDataLoader = DataLoader(TrainingDataSet(), batch_size=batch_size, shuffle=False)
testDataLoader = DataLoader(TestDataSet(), batch_size=batch_size, shuffle=False)
epoch_num = 200
#lr =
lr =
net = VGGBaseSimpleS2().to(device)
print(net)
# loss
loss_func = ()
# optimizer
optimizer = ((), lr=lr)
# optimizer = ((), lr=lr, momentum=, weight_decay=5e-4)
scheduler = .StepLR(optimizer, step_size=5, gamma=)
if not (“logCNN“):
(“logCNN“)
writer = (“logCNN“)
for epoch in range(epoch_num):
train_sum_loss = 0
train_sum_correct = 0
train_sum_fp = 0
train_sum_fn = 0
train_sum_tp = 0
train_sum_tn = 0
for i, data in enumerate(trainDataLoader):
()
inputs, labels = data
inputs = (1).to()
labels = ()
本章未完,点击下一页继续阅读。