13002457275 2021-01-03 21:05:05 +08:00
parent 7342fce686
commit a452c8f6f0
2 changed files with 64 additions and 0 deletions


@@ -0,0 +1,25 @@
import math

import torch
from matplotlib import pyplot as plt

def f(r):
    # Objective: f(r) = 9*tan(r) + 9*tan(3*pi/4 - r)
    return 9 * torch.tan(r) + 9 * torch.tan(3 / 4 * math.pi - r)

def drawfig(f):
    # Plot f on (1.1*pi/4, 0.99*pi/2), where both tangent terms are finite
    x = torch.linspace(1.1 / 4 * math.pi, 0.99 / 2 * math.pi, 100)
    y = f(x)
    plt.plot(x, y)
    plt.show()

# Start near pi/4 and minimize f with Adam via autograd
r = torch.tensor([1.16 / 4 * math.pi], requires_grad=True)
optimizer = torch.optim.Adam([r], lr=1e-4)
for step in range(10000):
    pred = f(r)            # 1. forward pass
    optimizer.zero_grad()  # zero the accumulated gradients
    pred.backward()        # backward pass
    optimizer.step()       # update r
    if step % 200 == 0:
        print(f'step {step}: r = {r.tolist()}, f(r) = {pred.item()}')
drawfig(f)
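
For reference, the minimizer here can also be found analytically, which gives a target to compare the Adam iterates against: setting f'(r) = 9/cos(r)^2 - 9/cos(3*pi/4 - r)^2 to zero yields r = 3*pi/8, with f(3*pi/8) = 18*tan(3*pi/8). A minimal standalone check (not part of the commit):

import math

r_star = 3 * math.pi / 8
f_star = 9 * math.tan(r_star) + 9 * math.tan(3 / 4 * math.pi - r_star)
print(r_star, f_star)  # ~1.1781 and 18*tan(3*pi/8) ~ 43.456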


@@ -0,0 +1,39 @@
import numpy as np
import matplotlib.pyplot as plt

def f(r):
    # Objective: f(r) = 9*tan(r) + 9*tan(3*pi/4 - r)
    return 9 * np.tan(r) + 9 * np.tan(3 / 4 * np.pi - r)

def drawfig(f):
    # Plot f on (1.01*pi/4, 0.99*pi/2), where both tangent terms are finite
    x = np.linspace(1.01 / 4 * np.pi, 0.99 / 2 * np.pi, 100)
    y = f(x)
    plt.plot(x, y)
    plt.show()

def grad(r):
    # Hand-derived gradient: f'(r) = 9/cos(r)^2 - 9/cos(3*pi/4 - r)^2
    return 9.0 / np.cos(r) ** 2 - 9.0 / np.cos(3 / 4 * np.pi - r) ** 2

def gradientDescent(f, r0, eta, nstep):
    # Fixed-step gradient descent; records the full (r, f(r)) trajectory
    r_history = np.zeros(nstep + 1)
    f_history = np.zeros(nstep + 1)
    r_history[0] = r0
    f_history[0] = f(r0)
    r = r0
    for i in range(1, nstep + 1):
        r = r - eta * grad(r)
        r_history[i] = r
        f_history[i] = f(r)
    print(f"r = {r}, f(r) = {f(r)}")
    return r_history, f_history

drawfig(f)
x, y = gradientDescent(f, 1.22 / 4 * np.pi, 0.0001 * np.pi, 10000)
plt.plot(x, y)  # descent trajectory in the (r, f(r)) plane
plt.show()
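
As a quick sanity check on the hand-derived grad above, a central difference should agree with it closely; a minimal standalone sketch (not part of the commit):

import numpy as np

def f(r):
    return 9 * np.tan(r) + 9 * np.tan(3 / 4 * np.pi - r)

def grad(r):
    return 9.0 / np.cos(r) ** 2 - 9.0 / np.cos(3 / 4 * np.pi - r) ** 2

r0 = 1.22 / 4 * np.pi
h = 1e-6
numeric = (f(r0 + h) - f(r0 - h)) / (2 * h)
print(numeric, grad(r0))  # the two values should agree to several decimal places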