You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
34 lines
1.1 KiB
Python
34 lines
1.1 KiB
Python
2 years ago
|
import torch
|
||
|
import torch.nn as nn
|
||
|
import pandas as pd
|
||
|
import numpy as np
|
||
|
from sklearn.preprocessing import MinMaxScaler
|
||
|
|
||
|
|
||
|
# Default hyperparameters for the MLP model defined below.  Each value
# mirrors the matching keyword default on MLP.__init__, so constructing
# MLP() with no arguments is equivalent to MLP(input_size, hidden_size,
# num_layers, output_size).
input_size = 1   # features per input sample
hidden_size = 1  # width of the (single) hidden Linear layer
num_layers = 2   # NOTE(review): accepted by MLP.__init__ but never used —
                 # presumably left over from the commented-out LSTM/RNN
                 # experiments; kept for interface compatibility.
output_size = 1  # features per output sample
|
class MLP(nn.Module):
    """Minimal linear model: a single bias-free ``nn.Linear`` layer.

    Despite the name, only ``fc0`` participates in ``forward``; the ReLU
    and the second Linear layer (``fc``) are constructed and initialized
    but not applied.  They appear to be remnants of earlier LSTM/RNN
    experiments and are kept so the parameter/state_dict layout stays
    compatible with existing checkpoints — TODO confirm before removing.

    With the constant initialization below, a fresh model computes
    ``y = 0.78 * x`` elementwise (for input_size == hidden_size == 1).
    """

    def __init__(self, input_size=1, hidden_size=1, num_layers=2, output_size=1):
        """Build the layers and apply deterministic/standard inits.

        Args:
            input_size: number of input features per sample.
            hidden_size: output width of ``fc0`` / input width of ``fc``.
            num_layers: unused; accepted for backward compatibility with
                the earlier recurrent variants of this class.
            output_size: output width of the (unused) ``fc`` layer.
        """
        super().__init__()
        # Active layer: y = W x with no bias term.
        self.fc0 = nn.Linear(input_size, hidden_size, bias=False)
        # Constructed but NOT used in forward() — see class docstring.
        self.relu = nn.ReLU()
        self.fc = nn.Linear(hidden_size, output_size)

        # Deterministic init for the active layer: every weight is 0.78,
        # so the model starts as y = 0.78 * x.
        nn.init.constant_(self.fc0.weight, 0.78)
        # Standard inits for the inactive head (kept for compatibility).
        nn.init.xavier_uniform_(self.fc.weight)
        nn.init.zeros_(self.fc.bias)

    def forward(self, x):
        """Return ``fc0(x)``; the ReLU/``fc`` stages are intentionally skipped."""
        return self.fc0(x)
|