深度学习习题与总结

◇ 2.5 自动微分

▷ 5. 使 $f(x)=\sin(x)$,绘制 $f(x)$ 与 $\frac{df(x)}{dx}$ 的图像,其中后者不使用 $f'(x)=\cos(x)$(即用自动微分求导,而非手写解析导数)

代码:

"""Plot f(x) = sin(x) and its derivative computed via autograd (not cos(x)).

x is sampled on [0, 10) with step 0.1; the derivative is obtained by
back-propagating through sin, so no analytic cos(x) formula is used.
"""
import torch
import matplotlib.pyplot as plt

# Sample points; requires_grad_ makes x a leaf tensor that accumulates .grad.
x = torch.arange(0, 10, 0.1)
x.requires_grad_(True)

y = torch.sin(x)

# backward() needs a scalar; sum() works because each y_i depends only on
# x_i, so d(sum sin(x))/dx_i = cos(x_i) — the per-element derivative.
y.sum().backward()

print(f"x = \n {x}")
print(f"x.detach() = \n {x.detach()}")
print(f"x.detach().numpy() = \n {x.detach().numpy()}")

print('\n','\n')

print(f"y = \n {y}")
print(f"y.detach() = \n {y.detach()}")
print(f"y.detach().numpy() = \n {y.detach().numpy()}")

print('\n','\n')

# x.grad itself does not require grad, so .numpy() works without detach().
print(f"x.grad = \n {x.grad}")
print(f"x.grad.numpy() = \n {x.grad.numpy()}")

# x and y are in the autograd graph, so they must be detached before
# conversion to NumPy for plotting.
plt.plot(x.detach().numpy(), y.detach().numpy(), label='sin(x)', color='blue')
plt.plot(x.detach().numpy(), x.grad.numpy(), label='cos(x)', color='red')

plt.xlabel('x')
plt.ylabel('y')
plt.legend()  # add a legend to the figure
plt.show()

结果:

x = 
tensor([0.0000, 0.1000, 0.2000, 0.3000, 0.4000, 0.5000, 0.6000, 0.7000, 0.8000,
0.9000, 1.0000, 1.1000, 1.2000, 1.3000, 1.4000, 1.5000, 1.6000, 1.7000,
1.8000, 1.9000, 2.0000, 2.1000, 2.2000, 2.3000, 2.4000, 2.5000, 2.6000,
2.7000, 2.8000, 2.9000, 3.0000, 3.1000, 3.2000, 3.3000, 3.4000, 3.5000,
3.6000, 3.7000, 3.8000, 3.9000, 4.0000, 4.1000, 4.2000, 4.3000, 4.4000,
4.5000, 4.6000, 4.7000, 4.8000, 4.9000, 5.0000, 5.1000, 5.2000, 5.3000,
5.4000, 5.5000, 5.6000, 5.7000, 5.8000, 5.9000, 6.0000, 6.1000, 6.2000,
6.3000, 6.4000, 6.5000, 6.6000, 6.7000, 6.8000, 6.9000, 7.0000, 7.1000,
7.2000, 7.3000, 7.4000, 7.5000, 7.6000, 7.7000, 7.8000, 7.9000, 8.0000,
8.1000, 8.2000, 8.3000, 8.4000, 8.5000, 8.6000, 8.7000, 8.8000, 8.9000,
9.0000, 9.1000, 9.2000, 9.3000, 9.4000, 9.5000, 9.6000, 9.7000, 9.8000,
9.9000], requires_grad=True)
x.detach() =
tensor([0.0000, 0.1000, 0.2000, 0.3000, 0.4000, 0.5000, 0.6000, 0.7000, 0.8000,
0.9000, 1.0000, 1.1000, 1.2000, 1.3000, 1.4000, 1.5000, 1.6000, 1.7000,
1.8000, 1.9000, 2.0000, 2.1000, 2.2000, 2.3000, 2.4000, 2.5000, 2.6000,
2.7000, 2.8000, 2.9000, 3.0000, 3.1000, 3.2000, 3.3000, 3.4000, 3.5000,
3.6000, 3.7000, 3.8000, 3.9000, 4.0000, 4.1000, 4.2000, 4.3000, 4.4000,
4.5000, 4.6000, 4.7000, 4.8000, 4.9000, 5.0000, 5.1000, 5.2000, 5.3000,
5.4000, 5.5000, 5.6000, 5.7000, 5.8000, 5.9000, 6.0000, 6.1000, 6.2000,
6.3000, 6.4000, 6.5000, 6.6000, 6.7000, 6.8000, 6.9000, 7.0000, 7.1000,
7.2000, 7.3000, 7.4000, 7.5000, 7.6000, 7.7000, 7.8000, 7.9000, 8.0000,
8.1000, 8.2000, 8.3000, 8.4000, 8.5000, 8.6000, 8.7000, 8.8000, 8.9000,
9.0000, 9.1000, 9.2000, 9.3000, 9.4000, 9.5000, 9.6000, 9.7000, 9.8000,
9.9000])
x.detach().numpy() =
[0. 0.1 0.2 0.3 0.4 0.5
0.6 0.7 0.8 0.90000004 1. 1.1
1.2 1.3 1.4 1.5 1.6 1.7
1.8000001 1.9 2. 2.1 2.2 2.3
2.4 2.5 2.6000001 2.7 2.8000002 2.9
3. 3.1000001 3.2 3.3 3.4 3.5
3.6000001 3.7 3.8 3.9 4. 4.1
4.2 4.3 4.4 4.5 4.6 4.7
4.8 4.9 5. 5.1000004 5.2000003 5.3
5.4 5.5 5.6 5.7 5.7999997 5.9
6. 6.1 6.2 6.2999997 6.4 6.5
6.6 6.7000003 6.8 6.9 7. 7.1
7.2 7.2999997 7.3999996 7.5 7.6 7.7
7.7999997 7.8999996 8. 8.1 8.2 8.3
8.4 8.5 8.6 8.7 8.8 8.900001
9. 9.1 9.2 9.3 9.400001 9.5
9.6 9.7 9.8 9.9 ]



y =
tensor([ 0.0000, 0.0998, 0.1987, 0.2955, 0.3894, 0.4794, 0.5646, 0.6442,
0.7174, 0.7833, 0.8415, 0.8912, 0.9320, 0.9636, 0.9854, 0.9975,
0.9996, 0.9917, 0.9738, 0.9463, 0.9093, 0.8632, 0.8085, 0.7457,
0.6755, 0.5985, 0.5155, 0.4274, 0.3350, 0.2392, 0.1411, 0.0416,
-0.0584, -0.1577, -0.2555, -0.3508, -0.4425, -0.5298, -0.6119, -0.6878,
-0.7568, -0.8183, -0.8716, -0.9162, -0.9516, -0.9775, -0.9937, -0.9999,
-0.9962, -0.9825, -0.9589, -0.9258, -0.8835, -0.8323, -0.7728, -0.7055,
-0.6313, -0.5507, -0.4646, -0.3739, -0.2794, -0.1822, -0.0831, 0.0168,
0.1165, 0.2151, 0.3115, 0.4049, 0.4941, 0.5784, 0.6570, 0.7290,
0.7937, 0.8504, 0.8987, 0.9380, 0.9679, 0.9882, 0.9985, 0.9989,
0.9894, 0.9699, 0.9407, 0.9022, 0.8546, 0.7985, 0.7344, 0.6630,
0.5849, 0.5010, 0.4121, 0.3191, 0.2229, 0.1245, 0.0248, -0.0752,
-0.1743, -0.2718, -0.3665, -0.4575], grad_fn=<SinBackward0>)
y.detach() =
tensor([ 0.0000, 0.0998, 0.1987, 0.2955, 0.3894, 0.4794, 0.5646, 0.6442,
0.7174, 0.7833, 0.8415, 0.8912, 0.9320, 0.9636, 0.9854, 0.9975,
0.9996, 0.9917, 0.9738, 0.9463, 0.9093, 0.8632, 0.8085, 0.7457,
0.6755, 0.5985, 0.5155, 0.4274, 0.3350, 0.2392, 0.1411, 0.0416,
-0.0584, -0.1577, -0.2555, -0.3508, -0.4425, -0.5298, -0.6119, -0.6878,
-0.7568, -0.8183, -0.8716, -0.9162, -0.9516, -0.9775, -0.9937, -0.9999,
-0.9962, -0.9825, -0.9589, -0.9258, -0.8835, -0.8323, -0.7728, -0.7055,
-0.6313, -0.5507, -0.4646, -0.3739, -0.2794, -0.1822, -0.0831, 0.0168,
0.1165, 0.2151, 0.3115, 0.4049, 0.4941, 0.5784, 0.6570, 0.7290,
0.7937, 0.8504, 0.8987, 0.9380, 0.9679, 0.9882, 0.9985, 0.9989,
0.9894, 0.9699, 0.9407, 0.9022, 0.8546, 0.7985, 0.7344, 0.6630,
0.5849, 0.5010, 0.4121, 0.3191, 0.2229, 0.1245, 0.0248, -0.0752,
-0.1743, -0.2718, -0.3665, -0.4575])
y.detach().numpy() =
[ 0. 0.09983342 0.19866933 0.29552022 0.38941836 0.47942555
0.5646425 0.64421767 0.7173561 0.7833269 0.84147096 0.8912074
0.9320391 0.9635582 0.98544973 0.997495 0.9995736 0.9916648
0.9738476 0.9463001 0.9092974 0.8632094 0.80849636 0.74570525
0.6754631 0.5984721 0.51550126 0.42737985 0.33498797 0.23924924
0.14112 0.04158052 -0.05837419 -0.15774564 -0.2555412 -0.35078323
-0.4425206 -0.5298362 -0.61185783 -0.68776625 -0.7568025 -0.81827706
-0.87157565 -0.916166 -0.9516021 -0.9775301 -0.99369097 -0.9999232
-0.99616456 -0.9824526 -0.9589243 -0.92581457 -0.8834545 -0.83226734
-0.77276444 -0.7055403 -0.6312667 -0.5506857 -0.46460244 -0.37387657
-0.2794155 -0.1821626 -0.08308959 0.01681362 0.1165493 0.21511999
0.3115413 0.40485018 0.4941135 0.57843983 0.6569866 0.728969
0.79366773 0.85043645 0.8987079 0.93799996 0.96791965 0.9881682
0.9985433 0.99894136 0.98935825 0.9698897 0.94073063 0.90217173
0.8545991 0.7984871 0.7343968 0.66296935 0.58491707 0.5010204
0.4121185 0.319098 0.2228901 0.12445424 0.02477485 -0.07515112
-0.17432715 -0.27176043 -0.3664793 -0.45753556]



x.grad =
tensor([ 1.0000, 0.9950, 0.9801, 0.9553, 0.9211, 0.8776, 0.8253, 0.7648,
0.6967, 0.6216, 0.5403, 0.4536, 0.3624, 0.2675, 0.1700, 0.0707,
-0.0292, -0.1288, -0.2272, -0.3233, -0.4161, -0.5048, -0.5885, -0.6663,
-0.7374, -0.8011, -0.8569, -0.9041, -0.9422, -0.9710, -0.9900, -0.9991,
-0.9983, -0.9875, -0.9668, -0.9365, -0.8968, -0.8481, -0.7910, -0.7259,
-0.6536, -0.5748, -0.4903, -0.4008, -0.3073, -0.2108, -0.1122, -0.0124,
0.0875, 0.1865, 0.2837, 0.3780, 0.4685, 0.5544, 0.6347, 0.7087,
0.7756, 0.8347, 0.8855, 0.9275, 0.9602, 0.9833, 0.9965, 0.9999,
0.9932, 0.9766, 0.9502, 0.9144, 0.8694, 0.8157, 0.7539, 0.6845,
0.6084, 0.5261, 0.4385, 0.3466, 0.2513, 0.1534, 0.0540, -0.0460,
-0.1455, -0.2435, -0.3392, -0.4314, -0.5193, -0.6020, -0.6787, -0.7486,
-0.8111, -0.8654, -0.9111, -0.9477, -0.9748, -0.9922, -0.9997, -0.9972,
-0.9847, -0.9624, -0.9304, -0.8892])
x.grad.numpy() =
[ 1. 0.9950042 0.9800666 0.9553365 0.921061 0.87758255
0.8253356 0.7648422 0.6967067 0.6216099 0.54030234 0.45359612
0.3623577 0.26749888 0.16996716 0.0707372 -0.02919955 -0.12884454
-0.22720216 -0.32328954 -0.41614684 -0.50484604 -0.58850116 -0.666276
-0.7373938 -0.8011436 -0.85688883 -0.90407217 -0.9422224 -0.9709582
-0.9899925 -0.99913514 -0.9982948 -0.9874798 -0.9667982 -0.9364567
-0.8967584 -0.8481 -0.79096776 -0.72593224 -0.6536436 -0.57482404
-0.490261 -0.400799 -0.30733278 -0.2107958 -0.11215262 -0.01238885
0.08749917 0.18651247 0.2836622 0.3779781 0.46851692 0.5543745
0.63469297 0.7086698 0.7755658 0.8347127 0.8855194 0.9274785
0.96017027 0.98326844 0.9965421 0.9998586 0.9931849 0.97658765
0.9502326 0.91438305 0.8693974 0.815725 0.75390226 0.6845467
0.60835147 0.52607775 0.43854767 0.3466353 0.25125992 0.15337405
0.05395571 -0.04600174 -0.14550003 -0.24354452 -0.3391547 -0.43137702
-0.5192883 -0.6020119 -0.67872036 -0.7486465 -0.8110931 -0.8654355
-0.91113025 -0.9477217 -0.97484356 -0.99222535 -0.99969304 -0.9971722
-0.9846878 -0.9623649 -0.9304262 -0.8891913 ]

结果图像:蓝色曲线为 sin(x),红色曲线为自动微分得到的导数,二者与解析结果 cos(x) 重合。(原图在导出时丢失)