"""
|
|
View more, visit my tutorial page: https://mofanpy.com/tutorials/
|
|
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
|
|
|
|
Dependencies:
|
|
torch: 0.1.11
|
|
"""
|
|
import torch
|
|
import torch.nn.functional as F
|
|
|
|
|
|
# replace following class code with an easy sequential network
|
|
class Net(torch.nn.Module):
|
|
def __init__(self, n_feature, n_hidden, n_output):
|
|
super(Net, self).__init__()
|
|
self.hidden = torch.nn.Linear(n_feature, n_hidden) # hidden layer
|
|
self.predict = torch.nn.Linear(n_hidden, n_output) # output layer
|
|
|
|
def forward(self, x):
|
|
x = F.relu(self.hidden(x)) # activation function for hidden layer
|
|
x = self.predict(x) # linear output
|
|
return x
|
|
|
|
net1 = Net(1, 10, 1)
|
|
|
|
# easy and fast way to build your network
|
|
net2 = torch.nn.Sequential(
|
|
torch.nn.Linear(1, 10),
|
|
torch.nn.ReLU(),
|
|
torch.nn.Linear(10, 1)
|
|
)
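
# Note (an added sketch, not part of the original tutorial): the two builds differ
# in how their layers are addressed. The class-based net1 exposes layers by the
# attribute names chosen in __init__, while the Sequential net2 indexes them by
# position, which is also why their printed architectures below look different.
# These checks assume a PyTorch version where nn.Sequential supports indexing.
assert isinstance(net1.hidden, torch.nn.Linear)   # layer accessed by attribute name
assert isinstance(net2[0], torch.nn.Linear)       # same kind of layer accessed by index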

print(net1)     # net1 architecture
"""
Net (
  (hidden): Linear (1 -> 10)
  (predict): Linear (10 -> 1)
)
"""

print(net2)     # net2 architecture
"""
Sequential (
  (0): Linear (1 -> 10)
  (1): ReLU ()
  (2): Linear (10 -> 1)
)
"""