Skip to content

Commit

Permalink
Add files via upload
Browse files Browse the repository at this point in the history
  • Loading branch information
caokai1073 authored May 26, 2021
1 parent 18ef33a commit b7a5368
Show file tree
Hide file tree
Showing 5 changed files with 817 additions and 692 deletions.
104 changes: 52 additions & 52 deletions Model.py
Original file line number Diff line number Diff line change
@@ -1,52 +1,52 @@
from torchvision import models
import torch.nn as nn

class model(nn.Module):
    """Multi-domain feature extractor.

    One MLP encoder is built per input domain (one per entry of
    ``input_dim``); every encoder maps its domain into a shared
    ``output_dim``-sized space, after which a shared projection head
    (``feature_show``) is applied.

    Args:
        input_dim: sequence of per-domain input feature sizes.
        output_dim: size of the shared embedding space.
    """

    def __init__(self, input_dim, output_dim):
        super(model, self).__init__()
        # Flag used elsewhere to mark whether weights were loaded from disk.
        self.restored = False
        self.input_dim = input_dim
        self.output_dim = output_dim

        # One domain-specific encoder per input dimensionality.
        self.feature = nn.ModuleList(
            [self._build_encoder(dim, self.output_dim) for dim in self.input_dim]
        )

        # Shared projection head applied after the per-domain encoder.
        # Note: no BatchNorm/activation after the final Linear.
        self.feature_show = nn.Sequential(
            nn.Linear(self.output_dim, self.output_dim),
            nn.BatchNorm1d(self.output_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(self.output_dim, self.output_dim),
            nn.BatchNorm1d(self.output_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(self.output_dim, self.output_dim),
        )

    @staticmethod
    def _build_encoder(in_dim, out_dim):
        """Return the 4-layer MLP encoder for one input domain.

        Width pattern: in_dim -> 2*in_dim -> 2*in_dim -> in_dim -> out_dim,
        each Linear followed by BatchNorm1d and in-place LeakyReLU(0.1).
        """
        return nn.Sequential(
            nn.Linear(in_dim, 2 * in_dim),
            nn.BatchNorm1d(2 * in_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(2 * in_dim, 2 * in_dim),
            nn.BatchNorm1d(2 * in_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(2 * in_dim, in_dim),
            nn.BatchNorm1d(in_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(in_dim, out_dim),
            nn.BatchNorm1d(out_dim),
            nn.LeakyReLU(0.1, True),
        )

    def forward(self, input_data, domain):
        """Encode ``input_data`` with the encoder for ``domain``.

        Args:
            input_data: batch of shape (batch, input_dim[domain]).
            domain: integer index selecting the per-domain encoder.

        Returns:
            Tensor of shape (batch, output_dim).
        """
        embedded = self.feature[domain](input_data)
        return self.feature_show(embedded)





from torchvision import models
import torch.nn as nn

class model(nn.Module):
    """Per-domain MLP encoders followed by a shared projection head.

    ``input_dim`` lists one input size per domain; each domain gets its own
    encoder mapping into a common ``output_dim``-dimensional space, which is
    then refined by the shared ``feature_show`` head.
    """

    def __init__(self, input_dim, output_dim):
        super(model, self).__init__()
        # Set externally when pretrained weights are restored.
        self.restored = False
        self.input_dim = input_dim
        self.output_dim = output_dim

        # Build one encoder per domain from a width specification:
        # d -> 2d -> 2d -> d -> output_dim, each stage being
        # Linear + BatchNorm1d + in-place LeakyReLU(0.1).
        encoders = []
        for dim in self.input_dim:
            stage_widths = [
                (dim, 2 * dim),
                (2 * dim, 2 * dim),
                (2 * dim, dim),
                (dim, self.output_dim),
            ]
            layers = []
            for n_in, n_out in stage_widths:
                layers.extend(
                    [
                        nn.Linear(n_in, n_out),
                        nn.BatchNorm1d(n_out),
                        nn.LeakyReLU(0.1, True),
                    ]
                )
            encoders.append(nn.Sequential(*layers))
        self.feature = nn.ModuleList(encoders)

        # Shared head: two Linear+BN+LeakyReLU stages, then a bare Linear.
        head = []
        for _ in range(2):
            head.extend(
                [
                    nn.Linear(self.output_dim, self.output_dim),
                    nn.BatchNorm1d(self.output_dim),
                    nn.LeakyReLU(0.1, True),
                ]
            )
        head.append(nn.Linear(self.output_dim, self.output_dim))
        self.feature_show = nn.Sequential(*head)

    def forward(self, input_data, domain):
        """Return the shared-space embedding of ``input_data``.

        Args:
            input_data: tensor of shape (batch, input_dim[domain]).
            domain: index of the encoder to apply.

        Returns:
            Tensor of shape (batch, output_dim).
        """
        projected = self.feature[domain](input_data)
        return self.feature_show(projected)




Loading

0 comments on commit b7a5368

Please sign in to comment.