Commit ba42a95

added 12 with slides

1 parent d45e5f5 commit ba42a95

4 files changed: +226 -4 lines

11_softmax_and_crossentropy.py (+8 -4)
@@ -11,7 +11,7 @@
 # sum = 1.0
 #

-# Softmax applies the exponential function to each element, and notmalizes
+# Softmax applies the exponential function to each element, and normalizes
 # by dividing by the sum of all these exponentials
 # -> squashes the output to be between 0 and 1 = probability
 # sum of all probabilities is 1
@@ -54,16 +54,15 @@ def cross_entropy(actual, predicted):
 loss = nn.CrossEntropyLoss()
 # loss(input, target)

-# target is of size nBatch = 1
+# target is of size nSamples = 1
 # each element has class label: 0, 1, or 2
 # Y (=target) contains class labels, not one-hot
 Y = torch.tensor([0])

-# input is of size nBatch x nClasses = 1 x 3
+# input is of size nSamples x nClasses = 1 x 3
 # y_pred (=input) must be raw, unnormalized scores (logits) for each class, not softmax
 Y_pred_good = torch.tensor([[2.0, 1.0, 0.1]])
 Y_pred_bad = torch.tensor([[0.5, 2.0, 0.3]])
-
 l1 = loss(Y_pred_good, Y)
 l2 = loss(Y_pred_bad, Y)

@@ -98,6 +97,11 @@ def cross_entropy(actual, predicted):
 print(f'Batch Loss1: {l1.item():.4f}')
 print(f'Batch Loss2: {l2.item():.4f}')

+# get predictions
+_, predictions1 = torch.max(Y_pred_good, 1)
+_, predictions2 = torch.max(Y_pred_bad, 1)
+print(f'Actual class: {Y}, Y_pred1: {predictions1}, Y_pred2: {predictions2}')
+
 # Binary classification
 class NeuralNet1(nn.Module):
     def __init__(self, input_size, hidden_size):
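Note: a minimal sketch (not part of the commit) of why the comments above insist on raw logits. nn.CrossEntropyLoss combines LogSoftmax and NLLLoss internally, so the manual combination below reproduces l1; passing softmax outputs instead of logits would apply softmax twice.

import torch
import torch.nn as nn
import torch.nn.functional as F

Y = torch.tensor([0])                          # class index, not one-hot
Y_pred_good = torch.tensor([[2.0, 1.0, 0.1]])  # raw logits, shape 1 x 3

l1 = nn.CrossEntropyLoss()(Y_pred_good, Y)
manual = F.nll_loss(F.log_softmax(Y_pred_good, dim=1), Y)
print(l1.item(), manual.item())  # both approx. 0.4170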

12_activation_functions.py (+74)
@@ -0,0 +1,74 @@
+# output = w*x + b
+# output = activation_function(output)
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+x = torch.tensor([-1.0, 1.0, 2.0, 3.0])
+
+# softmax
+output = torch.softmax(x, dim=0)
+print(output)
+sm = nn.Softmax(dim=0)
+output = sm(x)
+print(output)
+
+# sigmoid
+output = torch.sigmoid(x)
+print(output)
+s = nn.Sigmoid()
+output = s(x)
+print(output)
+
+# tanh
+output = torch.tanh(x)
+print(output)
+t = nn.Tanh()
+output = t(x)
+print(output)
+
+# relu
+output = torch.relu(x)
+print(output)
+relu = nn.ReLU()
+output = relu(x)
+print(output)
+
+# leaky relu
+output = F.leaky_relu(x)
+print(output)
+lrelu = nn.LeakyReLU()
+output = lrelu(x)
+print(output)
+
+# nn.ReLU() creates an nn.Module which you can add e.g. to an nn.Sequential model.
+# torch.relu on the other hand is just the functional API call to the relu function,
+# so that you can add it e.g. in your forward method yourself.
+
+# option 1 (create nn modules)
+class NeuralNet(nn.Module):
+    def __init__(self, input_size, hidden_size):
+        super(NeuralNet, self).__init__()
+        self.linear1 = nn.Linear(input_size, hidden_size)
+        self.relu = nn.ReLU()
+        self.linear2 = nn.Linear(hidden_size, 1)
+        self.sigmoid = nn.Sigmoid()
+
+    def forward(self, x):
+        out = self.linear1(x)
+        out = self.relu(out)
+        out = self.linear2(out)
+        out = self.sigmoid(out)
+        return out
+
+# option 2 (use activation functions directly in forward pass)
+class NeuralNet(nn.Module):
+    def __init__(self, input_size, hidden_size):
+        super(NeuralNet, self).__init__()
+        self.linear1 = nn.Linear(input_size, hidden_size)
+        self.linear2 = nn.Linear(hidden_size, 1)
+
+    def forward(self, x):
+        out = torch.relu(self.linear1(x))
+        out = torch.sigmoid(self.linear2(out))
+        return out
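A quick usage sketch (not part of the commit; the sizes and the random batch are arbitrary). Note that both variants are named NeuralNet, so when the file runs, the second definition shadows the first; the forward pass is the same either way.

import torch

model = NeuralNet(input_size=4, hidden_size=8)  # hypothetical sizes
x_batch = torch.randn(2, 4)                     # batch of 2 samples, 4 features
probs = model(x_batch)                          # sigmoid output, values in (0, 1)
print(probs.shape)                              # torch.Size([2, 1])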

12_plot_activations.py (+144)
@@ -0,0 +1,144 @@
+import numpy as np
+import matplotlib.pyplot as plt
+
+##### Sigmoid
+sigmoid = lambda x: 1 / (1 + np.exp(-x))
+
+x = np.linspace(-10, 10, 10)
+
+y = np.linspace(-10, 10, 100)
+
+fig = plt.figure()
+plt.plot(y, sigmoid(y), 'b', label='linspace(-10,10,100)')
+
+plt.grid(linestyle='--')
+
+plt.xlabel('X Axis')
+
+plt.ylabel('Y Axis')
+
+plt.title('Sigmoid Function')
+
+plt.xticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+plt.yticks([-2, -1, 0, 1, 2])
+
+plt.ylim(-2, 2)
+plt.xlim(-4, 4)
+
+plt.show()
+#plt.savefig('sigmoid.png')
+
+fig = plt.figure()
+
+##### TanH
+tanh = lambda x: 2*sigmoid(2*x) - 1
+
+x = np.linspace(-10, 10, 10)
+
+y = np.linspace(-10, 10, 100)
+
+plt.plot(y, tanh(y), 'b', label='linspace(-10,10,100)')
+
+plt.grid(linestyle='--')
+
+plt.xlabel('X Axis')
+
+plt.ylabel('Y Axis')
+
+plt.title('TanH Function')
+
+plt.xticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+plt.yticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+
+plt.ylim(-4, 4)
+plt.xlim(-4, 4)
+
+plt.show()
+#plt.savefig('tanh.png')
+
+fig = plt.figure()
+
+##### ReLU
+relu = lambda x: np.where(x >= 0, x, 0)
+
+x = np.linspace(-10, 10, 10)
+
+y = np.linspace(-10, 10, 1000)
+
+plt.plot(y, relu(y), 'b', label='linspace(-10,10,1000)')
+
+plt.grid(linestyle='--')
+
+plt.xlabel('X Axis')
+
+plt.ylabel('Y Axis')
+
+plt.title('ReLU')
+
+plt.xticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+plt.yticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+
+plt.ylim(-4, 4)
+plt.xlim(-4, 4)
+
+plt.show()
+#plt.savefig('relu.png')
+
+fig = plt.figure()
+
+##### Leaky ReLU
+leakyrelu = lambda x: np.where(x >= 0, x, 0.1*x)
+
+x = np.linspace(-10, 10, 10)
+
+y = np.linspace(-10, 10, 1000)
+
+plt.plot(y, leakyrelu(y), 'b', label='linspace(-10,10,1000)')
+
+plt.grid(linestyle='--')
+
+plt.xlabel('X Axis')
+
+plt.ylabel('Y Axis')
+
+plt.title('Leaky ReLU')
+
+plt.xticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+plt.yticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+
+plt.ylim(-4, 4)
+plt.xlim(-4, 4)
+
+plt.show()
+#plt.savefig('lrelu.png')
+
+fig = plt.figure()
+
+
+##### Binary Step
+bstep = lambda x: np.where(x >= 0, 1, 0)
+
+x = np.linspace(-10, 10, 10)
+
+y = np.linspace(-10, 10, 1000)
+
+plt.plot(y, bstep(y), 'b', label='linspace(-10,10,1000)')
+
+plt.grid(linestyle='--')
+
+plt.xlabel('X Axis')
+
+plt.ylabel('Y Axis')
+
+plt.title('Step Function')
+
+plt.xticks([-4, -3, -2, -1, 0, 1, 2, 3, 4])
+plt.yticks([-2, -1, 0, 1, 2])
+
+plt.ylim(-2, 2)
+plt.xlim(-4, 4)
+
+plt.show()
+#plt.savefig('step.png')
+
+print('done')
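The TanH block defines tanh via the identity tanh(x) = 2*sigmoid(2*x) - 1 instead of calling np.tanh directly; a small check (not part of the commit) confirms the two agree:

import numpy as np

sigmoid = lambda x: 1 / (1 + np.exp(-x))
x = np.linspace(-10, 10, 100)

# 2*sigmoid(2x) - 1 equals (1 - e^(-2x)) / (1 + e^(-2x)) = tanh(x)
assert np.allclose(2 * sigmoid(2 * x) - 1, np.tanh(x))
print('identity holds')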

slides/ActivationFunctions.pdf (275 KB)

Binary file not shown.
