-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathActivation.py
More file actions
40 lines (31 loc) · 862 Bytes
/
Activation.py
File metadata and controls
40 lines (31 loc) · 862 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
"""Plot four common activation functions (relu, sigmoid, tanh, softplus).

Evaluates each activation on a 1-D grid of inputs and renders them in a
2x2 matplotlib figure for visual comparison.
"""
import torch
import numpy as np
import torch.nn.functional as F
import matplotlib.pyplot as plt

# Fake data: 200 evenly spaced points from -5 to 5.
# NOTE: torch.autograd.Variable is deprecated since PyTorch 0.4 — plain
# tensors carry autograd state, so no wrapper is needed.
x = torch.linspace(-5, 5, 200)
x_np = x.numpy()  # matplotlib consumes numpy arrays, not tensors

# F.sigmoid / F.tanh were deprecated (and later removed); the supported
# spellings live on the torch namespace. F.relu / F.softplus are current.
y_relu = F.relu(x).numpy()
y_sigmoid = torch.sigmoid(x).numpy()
y_tanh = torch.tanh(x).numpy()
y_softplus = F.softplus(x).numpy()

plt.figure(1, figsize=(8, 6))

plt.subplot(221)
plt.plot(x_np, y_relu, c='red', label='relu')
plt.ylim((-1, 5))
plt.legend(loc='best')

plt.subplot(222)
plt.plot(x_np, y_sigmoid, c='red', label='sigmoid')
plt.ylim((-0.2, 1.2))
plt.legend(loc='best')

plt.subplot(223)
plt.plot(x_np, y_tanh, c='red', label='tanh')
# tanh's range is (-1, 1); the original (-1, 5) was copied from the relu
# panel and squashed the curve into the bottom of the axes.
plt.ylim((-1.2, 1.2))
plt.legend(loc='best')

plt.subplot(224)
plt.plot(x_np, y_softplus, c='red', label='softplus')
plt.ylim((0, 5))
plt.legend(loc='best')

plt.show()