13002457275 2023-02-24 23:10:33 +08:00
parent 4433d57fed
commit 9eff845195
38 changed files with 2787 additions and 2077 deletions


@ -3,5 +3,5 @@
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (venv)" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11 (regression)" project-jdk-type="Python SDK" />
</project>


@ -4,7 +4,7 @@
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.9 (venv)" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.11 (regression)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>


@ -1,45 +1,45 @@
class Resource():
    def __enter__(self):
        print('===connect to resource===')
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        print('===close resource connection===')
        print(exc_type)
        print(exc_val)
        print(exc_tb)
        return True  # returning True suppresses the exception raised inside the with-block

    def operate(self):
        1/0

with Resource() as res:
    res.operate()

import contextlib

@contextlib.contextmanager
def open_func(file_name):
    # everything before the yield plays the role of __enter__
    print('open file:', file_name, 'in __enter__')
    file_handler = open(file_name, 'r')
    try:
        yield file_handler
    except Exception as exc:
        # deal with exception
        print('the exception was thrown')
    finally:
        # everything after the yield plays the role of __exit__
        print('close file:', file_name, 'in __exit__')
        file_handler.close()
    return

with open_func('mytest.txt') as file_in:
    for line in file_in:
        1/0
        print(line)


@ -0,0 +1,42 @@
# https://www.cnblogs.com/wozijisun/p/16635365.html
def singleton(cls):
    _instance_dict = {}  # a dict lets one decorator serve several classes, each getting its own singleton

    def inner(*args, **kwargs):
        if cls not in _instance_dict:
            _instance_dict[cls] = cls(*args, **kwargs)
        return _instance_dict.get(cls)
    return inner

@singleton
class Teacher:
    def __init__(self, name, age):
        self.name = name
        self.age = age

@singleton
class Student:
    def __init__(self, name, age):
        self.name = name
        self.age = age

    def get(self):
        t1 = Teacher("aa", 52)
        return t1

    # def __new__(cls, *args, **kwargs):  # this part of "method 3" was moved into the function decorator
    #     if not cls._instance:
    #         cls._instance = super().__new__(cls)
    #     return cls._instance

stu1 = Student('bb', 18)
stu2 = Student('jack', 18)
stu3 = Student('aaa', 19)
print(stu1 is stu3)  # True: every construction returns the first instance
print(stu1.__dict__, stu3.__dict__)

t1 = Student('bb', 18).get()
t2 = Student('bb', 18).get()
print(t1 is t2)  # True: Teacher is a singleton as well
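
A caveat worth noting: the decorator above is not thread-safe, since two threads can race past the `if cls not in _instance_dict` check and each build an instance. A minimal sketch of a locked variant (the `threading.Lock` and double-check are additions for illustration, not part of the original):

import threading

def singleton_threadsafe(cls):
    _instance_dict = {}
    _lock = threading.Lock()

    def inner(*args, **kwargs):
        if cls not in _instance_dict:          # fast path without taking the lock
            with _lock:
                if cls not in _instance_dict:  # re-check once the lock is held
                    _instance_dict[cls] = cls(*args, **kwargs)
        return _instance_dict[cls]
    return inner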

pandasSQL例子/demo.py (new file, 109 lines)

@ -0,0 +1,109 @@
from sklearn.datasets import load_iris
import pandas as pd
from pandasql import sqldf
from pandasql import load_meat, load_births
import re
births = load_births()
meat = load_meat()
iris = load_iris()
iris_df = pd.DataFrame(iris.data, columns=iris.feature_names)
iris_df['species'] = pd.Categorical.from_codes(iris.target, iris.target_names)
iris_df.columns = [re.sub("[() ]", "", col) for col in iris_df.columns]
print(sqldf("SELECT * FROM iris_df LIMIT 10;", locals()))
print(sqldf("SELECT sepalwidthcm, species FROM iris_df LIMIT 10;", locals()))
q = """
select
species
, avg(sepalwidthcm)
, min(sepalwidthcm)
, max(sepalwidthcm)
from
iris_df
group by
species;
"""
print("*" * 80)
print("aggregation")
print("-" * 80)
print(q)
print(sqldf(q, locals()))
def pysqldf(q):
    "add this to your script if you get tired of calling locals()"
    return sqldf(q, globals())

print("*" * 80)
print("calling from a helper function")
print('''def pysqldf(q):
    "add this to your script if you get tired of calling locals()"
    return sqldf(q, globals())''')
print("-" * 80)
print(q)
print(pysqldf(q))
q = """
select
a.*
from
iris_df a
inner join
iris_df b
on a.species = b.species
limit 10;
"""
print("*" * 80)
print("joins")
print("-" * 80)
print(q)
print(pysqldf(q))
q = """
select
*
from
iris_df
where
species = 'virginica'
and sepallengthcm > 7.7;
"""
print("*" * 80)
print("where clause")
print("-" * 80)
print(q)
print(pysqldf(q))
iris_df['id'] = range(len(iris_df))
q = """
select
*
from
iris_df
where
id in (select id from iris_df where sepalwidthcm*sepallengthcm > 25);
"""
print("*" * 80)
print("subqueries")
print("-" * 80)
print(q)
print(pysqldf(q))
q = """
SELECT
m.*
, b.births
FROM
meat m
INNER JOIN
births b
on m.date = b.date
ORDER BY
m.date;
"""
print(pysqldf(q).head())
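
For comparison, the GROUP BY aggregation near the top of this script has a direct pandas equivalent that needs no SQL layer at all; a rough sketch (standard pandas API, mirroring the query's mean/min/max over species):

agg = iris_df.groupby('species')['sepalwidthcm'].agg(['mean', 'min', 'max'])
print(agg)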


@ -1,77 +0,0 @@
#coding:utf-8
# https://mp.weixin.qq.com/s/ac-CgZj-avmPBraVvQUuBQ
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
class Function():
def __init__(self):
self.points_x = np.linspace(-20, 20, 1000)
self.points_y = self.f(self.points_x)
def f(self,x):
return (0.15*x)**2 + np.cos(x) + np.sin(3*x)/3 + np.cos(5*x)/5 + np.sin(7*x)/7
def df(self,x):
return (9/200)*x - np.sin(x) -np.sin(5*x) + np.cos(3*x) + np.cos(7*x)
    def ddf(self, x):
        # second derivative of f; the original had np.sin(x) where np.sin(3*x) belongs
        return (9/200) - np.cos(x) - 3*np.sin(3*x) - 5*np.cos(5*x) - 7*np.sin(7*x)
# commented-out AdaGrad / RMSProp experiments kept for reference:
#
# # AdaGrad
# for i in range(15):
#     # plot the original function
#     plt.plot(points_x, points_y, c="b", alpha=0.5, linestyle="-")
#     # algorithm starts here
#     lr = pow(1.5,-i)*32
#     delta = 1e-7
#     x = -20
#     r = 0
#     AdaGrad_x, AdaGrad_y = [], []
#     for it in range(1000):
#         AdaGrad_x.append(x), AdaGrad_y.append(f(x))
#         g = df(x)
#         r = r + g*g  # accumulate squared gradients
#         x = x - lr /(delta + np.sqrt(r)) * g
#
#     plt.xlim(-20, 20)
#     plt.ylim(-2, 10)
#     plt.plot(AdaGrad_x, AdaGrad_y, c="r", linestyle="-")
#     plt.scatter(AdaGrad_x[-1],AdaGrad_y[-1],90,marker = "x",color="g")
#     plt.title("AdaGrad,lr=%f"%(lr))
#     plt.savefig("AdaGrad,lr=%f"%(lr) + ".png")
#     plt.clf()
#
# # RMSProp
# for i in range(15):
#     # plot the original function
#     plt.plot(points_x, points_y, c="b", alpha=0.5, linestyle="-")
#     # algorithm starts here
#     lr = pow(1.5,-i)*32
#     delta = 1e-6
#     rou = 0.8
#     x = -20
#     r = 0
#     RMSProp_x, RMSProp_y = [], []
#     for it in range(1000):
#         RMSProp_x.append(x), RMSProp_y.append(f(x))
#         g = df(x)
#         r = rou * r + (1-rou)*g*g  # accumulate squared gradients
#         x = x - lr /(delta + np.sqrt(r)) * g
#
#     plt.xlim(-20, 20)
#     plt.ylim(-2, 10)
#     plt.plot(RMSProp_x, RMSProp_y, c="r", linestyle="-")
#     plt.scatter(RMSProp_x[-1],RMSProp_y[-1],90,marker = "x",color="g")
#     plt.title("RMSProp,lr=%f,rou=%f"%(lr,rou))
#     plt.savefig("RMSProp,lr=%f,rou=%f"%(lr,rou) + ".png")
#     plt.clf()

优化算法/AdaFactor.py (new file, 61 lines)

@ -0,0 +1,61 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
    lr = 0.2  # lr was only set via the commented-out trial.suggest_float above; fix a value so the script runs
    eps = 1e-5
    s1, s2 = 0, 0          # running averages of the squared gradients
    delta1, delta2 = 0, 0  # running averages of the squared updates
    for epoch in range(epochs):
        dx1, dx2 = eval(dfName)(x1, x2)
        s1 = beta * s1 + (1 - beta) * dx1 ** 2
        s2 = beta * s2 + (1 - beta) * dx2 ** 2
        # with only two scalar parameters there is nothing for AdaFactor to factor,
        # so this reduces to an Adadelta-style update (reconstructed from the broken original)
        g1 = math.sqrt(delta1 + eps) / math.sqrt(s1 + eps) * dx1
        g2 = math.sqrt(delta2 + eps) / math.sqrt(s2 + eps) * dx2
        delta1 = beta * delta1 + (1 - beta) * g1 ** 2
        delta2 = beta * delta2 + (1 - beta) * g2 ** 2
        x1 -= g1
        x2 -= g2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()
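
For reference, the part of AdaFactor that cannot show up with two scalars is its factored second moment for matrix-shaped parameters. A rough numpy sketch of that estimate, following my reading of Shazeer & Stern (2018) — the function name, shapes, and constants here are illustrative, not code from this repo:

import numpy as np

def adafactor_step(W, G, R, C, lr=0.01, beta=0.9, eps=1e-30):
    """One AdaFactor-style step for a matrix parameter W with gradient G.

    R and C are running row/column sums of the squared gradients; the full
    second-moment matrix is approximated by their rank-1 outer product.
    """
    G2 = G ** 2 + eps
    R = beta * R + (1 - beta) * G2.sum(axis=1)  # per-row statistics
    C = beta * C + (1 - beta) * G2.sum(axis=0)  # per-column statistics
    V = np.outer(R, C) / R.sum()                # rank-1 estimate of E[G**2]
    W = W - lr * G / np.sqrt(V)
    return W, R, C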

优化算法/Adadelta.py (new file, 61 lines)

@ -0,0 +1,61 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
    eps = 1e-5
    s1, s2 = 0, 0          # running averages of the squared gradients
    delta1, delta2 = 0, 0  # running averages of the squared updates
    for epoch in range(epochs):
        dx1, dx2 = eval(dfName)(x1, x2)
        s1 = beta * s1 + (1 - beta) * dx1 ** 2
        s2 = beta * s2 + (1 - beta) * dx2 ** 2
        # Adadelta: scale the gradient by the RMS of past updates over the RMS of
        # past gradients, so no explicit learning rate is needed
        g1 = math.sqrt(delta1 + eps) / math.sqrt(s1 + eps) * dx1
        g2 = math.sqrt(delta2 + eps) / math.sqrt(s2 + eps) * dx2
        delta1 = beta * delta1 + (1 - beta) * g1 ** 2
        delta2 = beta * delta2 + (1 - beta) * g2 ** 2
        x1 -= g1
        x2 -= g2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()


@ -0,0 +1,53 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.5
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
s1 ,s2 = 0.0001,0.0001
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
s1 += math.fabs(dx1)/2
s2 += math.fabs(dx2)/2
lr1,lr2 = lr/s1,lr/ s2
x1 -= lr1 * dx1
x2 -= lr2 * dx2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()


@ -0,0 +1,53 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.5
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
s1 ,s2 = 0.0001,0.0001
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
s1 += math.fabs(dx1)/2
s2 += math.fabs(dx2)/2
lr1,lr2 = lr/s1,lr/ s2
x1 -= lr1 * dx1
x2 -= lr2 * dx2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()

优化算法/Adagrad.py (new file, 53 lines)

@ -0,0 +1,53 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.5
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
s1 ,s2 = 0,0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
s1 += dx1 ** 2
s2 += dx2 ** 2
lr1,lr2 = lr/ math.sqrt(s1),lr/ math.sqrt(s2)
x1 -= lr1 * dx1
x2 -= lr2 * dx2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()


@ -1,36 +0,0 @@
# Adam
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from AA import Function

ff = Function()
for i in range(48):
    # plot the original function
    plt.plot(ff.points_x, ff.points_y, c="b", alpha=0.5, linestyle="-")
    # algorithm starts here
    lr = pow(1.2, -i) * 2
    rou1, rou2 = 0.9, 0.9  # the original algorithm uses rou2=0.999, but it performs much worse here
    delta = 1e-8
    x = -20
    s, r = 0, 0
    t = 0
    Adam_x, Adam_y = [], []
    for it in range(1000):
        Adam_x.append(x), Adam_y.append(ff.f(x))
        t += 1
        g = ff.df(x)
        s = rou1 * s + (1 - rou1) * g
        r = rou2 * r + (1 - rou2) * g * g  # accumulate squared gradients
        # bias correction must not overwrite the accumulators (the original did, corrupting later steps)
        s_hat = s / (1 - pow(rou1, t))
        r_hat = r / (1 - pow(rou2, t))
        x = x - lr / (delta + np.sqrt(r_hat)) * s_hat

    plt.xlim(-20, 20)
    plt.ylim(-2, 10)
    plt.plot(Adam_x, Adam_y, c="r", linestyle="-")
    plt.scatter(Adam_x[-1], Adam_y[-1], 90, marker="X", color="g")
    plt.title("Adam,lr=%f" % (lr))
    # plt.savefig("Adam,lr=%f"%(lr) + ".png")
    plt.show()
    plt.clf()

优化算法/Adma.py (new file, 59 lines)

@ -0,0 +1,59 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.2
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
beta2 = 0.5
z = eval(fName)(x1, x2)
z_history.append(z)
s1 ,s2 = 0,0
v1, v2 = 0, 0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
v1 = beta * v1 + (1 - beta) * dx1
v2 = beta * v2 + (1 - beta) * dx2
s1 = beta2*s1+(1-beta2)* dx1 ** 2
s2 = beta2*s2+(1-beta2)* dx2 ** 2
lr1,lr2 = lr/ math.sqrt(s1),lr/ math.sqrt(s2)
x1 -= lr1 * v1
x2 -= lr2 * v2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()

优化算法/Nesterov.py (new file, 50 lines)

@ -0,0 +1,50 @@
import numpy as np
from function import *
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
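# note: despite the file name, the loop below is plain EMA momentum;
# true Nesterov momentum would evaluate the gradient at the look-ahead point (x - lr*beta*v)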
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.01
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
v1,v2 = 0,0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
v1 = beta * v1 + (1-beta)* dx1
v2 = beta * v2 + (1-beta) * dx2
x1 -= lr * v1
x2 -= lr * v2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()

优化算法/NewTon.py (new file, 44 lines)

@ -0,0 +1,44 @@
import numpy as np
from function import *
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
ddfName = 'ddf_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
for epoch in range(epochs):
alpha = pow(1.2, -epoch) * 20
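        # alpha shrinks each epoch; adding it to the curvature below damps the Newton step,
        # in the spirit of Levenberg-style regularization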
dx1, dx2 = eval(dfName)(x1, x2)
ddx1,ddx2 = eval(ddfName)(x1, x2)
x1 -= dx1/(alpha+ddx1)
x2 -= dx2/(alpha+ddx2)
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()

优化算法/RMSProp.py (new file, 53 lines)

@ -0,0 +1,53 @@
import numpy as np
from function import *
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.2
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
s1 ,s2 = 0,0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
s1 = beta*s1+(1-beta)* dx1 ** 2
s2 = beta*s2+(1-beta)* dx2 ** 2
lr1,lr2 = lr/ math.sqrt(s1),lr/ math.sqrt(s2)
x1 -= lr1 * dx1
x2 -= lr2 * dx2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()

优化算法/function.py (new file, 83 lines)

@ -0,0 +1,83 @@
import optuna
import plotly
from optuna.visualization import plot_optimization_history
import numpy as np
import plotly.graph_objects as go
def himmelblau(x1, x2):
    # Himmelblau-style test function
    # (note: the standard Himmelblau uses x2 in the first term, (x1**2 + x2 - 11)**2;
    #  the derivatives below are consistent with the variant used here)
    return (x1 ** 2 + x1 - 11) ** 2 + (x1 + x2 ** 2 - 7) ** 2

def df_himmelblau(x1, x2):
    return (x1 ** 2 + x1 - 11) * (4 * x1 + 2) + (x1 + x2 ** 2 - 7) * 2, 2 * (x1 + x2 ** 2 - 7) * 2 * x2

def ddf_himmelblau(x1, x2):
    # second partials of the variant above; d2f/dx1^2 is 12*x1**2 + 12*x1 - 40 (the original had +2)
    return 12 * x1 ** 2 + 12 * x1 - 40, 4 * x1 + 12 * x2 ** 2 - 28

def rosenbrock(x1, x2):
    # Rosenbrock function (with coefficient 10 instead of the usual 100)
    return (1 - x1) ** 2 + 10 * (x2 - x1 ** 2) ** 2

def df_rosenbrock(x1, x2):
    return -2 + 2 * x1 - 40 * (x2 - x1 ** 2) * x1, 20 * (x2 - x1 ** 2)

def semicircle(x1, x2):
    # simple bowl: x1^2 + x2^2
    return x1 ** 2 + x2 ** 2

def df_semicircle(x1, x2):
    return x1 * 2, x2 * 2

def semicircle2(x1, x2):
    return 0.1 * x1 ** 2 + 2 * x2 ** 2

def df_semicircle2(x1, x2):
    return x1 * 0.2, x2 * 4
def plotF(fName,x,y,grid):
x1 = np.arange(-x, x, grid)
x2 = np.arange(-y, y, grid)
x1, x2 = np.meshgrid(x1, x2)
fig = go.Figure(data = [
go.Surface(
contours = { "x": {"show": True, "start": -4, "end": 4, "size": 0.4, "color":"white"}, "y": {"show": True, "start": -4, "end": 4, "size": 0.4, "color":"white"}, "z": {"show": True, "start": 0.5, "end": 800, "size": 5} },
x = x1,
y = x2,
z = fName(x1,x2),
opacity=0.2),
])
fig.show()
def plotAll(fName,x1_history,x2_history):
x1 = np.arange(-5, 5, 0.5)
x2 = np.arange(-5, 5, 0.5)
x1, x2 = np.meshgrid(x1, x2)
z_history = eval(fName)(x1_history, x2_history)
fig = go.Figure(data=[
go.Surface(
contours={"x": {"show": True, "start": -4, "end": 4, "size": 0.4, "color": "white"},
"y": {"show": True, "start": -4, "end": 4, "size": 0.4, "color": "white"},
"z": {"show": True, "start": 0.5, "end": 800, "size": 5}},
x=x1,
y=x2,
z=eval(fName)(x1, x2),
opacity=0.2),
go.Scatter3d(
x=x1_history, y=x2_history, z=z_history,
mode='lines+markers',
marker=dict(size=4, colorscale='Viridis', opacity=1),
line=dict(width=2)
),
])
fig.show()
def main():
# plotF(rosenbrock,2,8,0.1)
plotF(himmelblau, 5, 5, 0.5)
# plotF(semicircle, 5, 5, 0.5)
if __name__ == '__main__':
main()
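
Hand-derived gradients are easy to get wrong (see the note on ddf_himmelblau above), so a central-difference check is cheap insurance. A minimal sketch; the helper name, sample point, and tolerance are arbitrary:

def check_grad(f, df, x1, x2, h=1e-6, tol=1e-4):
    num1 = (f(x1 + h, x2) - f(x1 - h, x2)) / (2 * h)  # central difference in x1
    num2 = (f(x1, x2 + h) - f(x1, x2 - h)) / (2 * h)  # central difference in x2
    an1, an2 = df(x1, x2)
    assert abs(num1 - an1) < tol and abs(num2 - an2) < tol, (num1, an1, num2, an2)

# check_grad(himmelblau, df_himmelblau, 0.3, -1.2)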


@ -1,2 +0,0 @@
lr = pow(2,-2)*16
print(lr)


@ -1,29 +0,0 @@
# gradient descent with momentum
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from AA import Function
ff = Function()
for i in range(10):
# plot the original function
plt.plot(ff.points_x, ff.points_y, c="b", alpha=0.5, linestyle="-")
# algorithm starts here
lr = 0.002
m = 1 - pow(0.5,i)
x = -20
v = 1.0
GDM_x, GDM_y = [], []
for it in range(1000):
GDM_x.append(x), GDM_y.append(ff.f(x))
v = m * v - lr * ff.df(x)
x = x + v
plt.xlim(-20, 20)
plt.ylim(-2, 10)
plt.plot(GDM_x, GDM_y, c="r", linestyle="-")
plt.scatter(GDM_x[-1],GDM_y[-1],90,marker = "x",color="g")
plt.title("Gradient descent + momentum,lr=%f,m=%f"%(lr,m))
# plt.savefig("Gradient descent + momentum,lr=%f,m=%f"%(lr,m) + ".png")
plt.show()
plt.clf()

优化算法/动量法.py (new file, 51 lines)

@ -0,0 +1,51 @@
import numpy as np
from function import *
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.01
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
v1,v2 = 0,0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
v1 = beta * v1 + (1-beta)* dx1
v2 = beta * v2 + (1-beta) * dx2
x1 -= lr * v1
x2 -= lr * v2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()


@ -0,0 +1,89 @@
import numpy as np
from function import *
import wandb
import math
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective():
x1 = 0.1
x2 = 0.1
lr = 0.01
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
v1,v2 = 0,0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
v1 = beta * v1 + (1-beta)* dx1
v2 = beta * v2 + (1-beta) * dx2
x1 -= lr * v1
x2 -= lr * v2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
wandb.log({
'loss': z,
})
return eval(fName)(x1, x2)
def RMS():
x1 = 0.1
x2 = 0.1
lr = 0.2
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
s1 ,s2 = 0,0
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
s1 = beta*s1+(1-beta)* dx1 ** 2
s2 = beta*s2+(1-beta)* dx2 ** 2
lr1,lr2 = lr/ math.sqrt(s1),lr/ math.sqrt(s2)
x1 -= lr1 * dx1
x2 -= lr2 * dx2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
wandb.log({
'loss': z,
})
return eval(fName)(x1, x2)
def main():
RMS()
plotAll(fName,np.array(x1_history), np.array(x2_history))
if __name__ == '__main__':
wandb.init(project="优化算法", name="RMS")
main()
wandb.finish()


@ -0,0 +1 @@
# http://zh.d2l.ai/chapter_optimization/lr-scheduler.html
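# The d2l chapter linked above covers learning-rate schedules; a minimal sketch of two
# common ones, usable with the gradient-descent loops in the other scripts in this folder
# (the constants are placeholders, not values from the chapter):

import math

def exponential_lr(base_lr, epoch, decay=0.9):
    return base_lr * decay ** epoch

def cosine_lr(base_lr, epoch, total_epochs, min_lr=0.0):
    return min_lr + 0.5 * (base_lr - min_lr) * (1 + math.cos(math.pi * epoch / total_epochs))

# usage inside a training loop:
# for epoch in range(epochs):
#     lr = exponential_lr(0.5, epoch)
#     x1 -= lr * dx1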


@ -0,0 +1,46 @@
import numpy as np
from function import *
z_history = []
x1_history = []
x2_history = []
# fName = 'semicircle'
# fName = 'rosenbrock'
# fName = 'semicircle2'
fName = 'himmelblau'
dfName = 'df_'+fName
def objective(trial):
x1 = 0.1
x2 = 0.1
lr = 0.002
# x1 = trial.suggest_float("x1", 0, 3)
# x2 = trial.suggest_float("x2", 0, 3)
beta = 0.5
# lr = trial.suggest_float("lr", 0, 0.1)
epochs = 15
z = eval(fName)(x1, x2)
z_history.append(z)
for epoch in range(epochs):
dx1, dx2 = eval(dfName)(x1, x2)
x1 -= lr * dx1
x2 -= lr * dx2
x1_history.append(x1)
x2_history.append(x2)
z = eval(fName)(x1, x2)
z_history.append(z)
return eval(fName)(x1, x2)
def main():
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=1, timeout=600)
plot_optimization_history(study).show()
plotAll(fName,np.array(x1_history), np.array(x2_history))
print(study.best_value,study.best_trial)
if __name__ == '__main__':
main()


@ -1,29 +0,0 @@
# Newton's method
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from AA import Function
ff = Function()
for i in range(72):
# plot the original function
plt.plot(ff.points_x, ff.points_y, c="b", alpha=0.5, linestyle="-")
# algorithm starts here
alpha= pow(1.2,-i)*20
x = -20.0
Newton_x, Newton_y = [], []
for it in range(1000):
Newton_x.append(x), Newton_y.append(ff.f(x))
g = ff.df(x)
gg = ff.ddf(x)
x = x - g/(gg+alpha)
plt.xlim(-20, 20)
plt.ylim(-2, 10)
plt.plot(Newton_x, Newton_y, c="r", linestyle="-")
plt.scatter(Newton_x[-1],Newton_y[-1],90,marker = "x",color="g")
plt.title("Newton,alpha=%f"%(alpha))
# plt.savefig("Newton,alpha=%f"%(alpha) + ".png")
plt.show()
plt.clf()


@ -1,26 +0,0 @@
# plain gradient descent (GD)
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from AA import Function
ff = Function()
for i in range(10):
# plot the original function
plt.plot(ff.points_x, ff.points_y, c="b", alpha=0.5, linestyle="-")
# algorithm starts here
lr = pow(2,-i)*16
x = -20.0
GD_x, GD_y = [], []
for it in range(1000):
GD_x.append(x), GD_y.append(ff.f(x))
dx = ff.df(x)
x = x - lr * dx
plt.xlim(-20, 20)
plt.ylim(-2, 10)
plt.plot(GD_x, GD_y, c="r", linestyle="--")
plt.title("Gradient descent,lr=%f"%(lr))
# plt.savefig("Gradient descent,lr=%f"%(lr) + ".png")
plt.show()
plt.clf()

(binary image file changed: 266 KiB → 232 KiB; not shown)


@ -1,366 +1,366 @@
3186.00
3192.00
3174.00
3165.00
3177.00
3189.00
3158.00
3174.00
3164.00
3162.00
3147.00
3162.00
3158.00
3161.00
3153.00
3153.00
3157.00
3161.00
3144.00
3138.00
3140.00
3146.00
3107.00
3125.00
3129.00
3135.00
3178.00
3189.00
3186.00
3182.00
3110.00
3112.00
3124.00
3093.00
3096.00
3110.00
3087.00
3088.00
3070.00
3077.00
3088.00
3073.00
3078.00
3080.00
3078.00
3068.00
3047.00
3075.00
3070.00
3043.00
3047.00
3082.00
3048.00
3027.00
3075.00
3067.00
3050.00
3039.00
3119.00
3036.00
3011.00
3016.00
3004.00
2983.00
2987.00
2993.00
3055.00
3097.00
3086.00
3121.00
3093.00
3085.00
3081.00
3076.00
3078.00
3113.00
3071.00
3070.00
3132.00
3134.00
3136.00
3107.00
3122.00
3124.00
3137.00
3101.00
3095.00
3113.00
3111.00
3084.00
3133.00
3127.00
3142.00
3128.00
3161.00
3146.00
3135.00
3132.00
3125.00
3153.00
3139.00
3142.00
3137.00
3123.00
3118.00
3134.00
3119.00
3111.00
3133.00
3105.00
3120.00
3112.00
3122.00
3122.00
3114.00
3109.00
3131.00
3115.00
3149.00
3149.00
3143.00
3125.00
3162.00
3160.00
3147.00
3198.00
3219.00
3233.00
3240.00
3244.00
3243.00
3273.00
3276.00
3278.00
3301.00
3298.00
3294.00
3309.00
3303.00
3309.00
3306.00
3308.00
3305.00
3317.00
3370.00
3300.00
3306.00
3320.00
3303.00
3304.00
3288.00
3278.00
3297.00
3281.00
3282.00
3281.00
3294.00
3262.00
3306.00
3267.00
3241.00
3268.00
3246.00
3259.00
3254.00
3258.00
3242.00
3239.00
3246.00
3268.00
3274.00
3226.00
3242.00
3214.00
3186.00
3191.00
3178.00
3167.00
3153.00
3161.00
3128.00
3142.00
3126.00
3152.00
3147.00
3137.00
3151.00
3141.00
3105.00
3154.00
3123.00
3125.00
3116.00
3115.00
3102.00
3132.00
3122.00
3131.00
3131.00
3130.00
3116.00
3135.00
3135.00
3152.00
3124.00
3130.00
3130.00
3119.00
3135.00
3123.00
3127.00
3106.00
3120.00
3120.00
3113.00
3111.00
3118.00
3125.00
3123.00
3123.00
3120.00
3119.00
3116.00
3084.00
3095.00
3122.00
3119.00
3114.00
3141.00
3140.00
3147.00
3135.00
3159.00
3153.00
3139.00
3171.00
3152.00
3148.00
3150.00
3156.00
3152.00
3154.00
3169.00
3169.00
3178.00
3172.00
3154.00
3191.00
3178.00
3184.00
3206.00
3196.00
3224.00
3227.00
3227.00
3218.00
3229.00
3225.00
3233.00
3244.00
3253.00
3237.00
3244.00
3260.00
3242.00
3254.00
3227.00
3224.00
3226.00
3212.00
3218.00
3219.00
3224.00
3201.00
3212.00
3212.00
3194.00
3210.00
3206.00
3202.00
3178.00
3190.00
3203.00
3200.00
3178.00
3191.00
3199.00
3212.00
3198.00
3232.00
3226.00
3212.00
3200.00
3207.00
3208.00
3196.00
3223.00
3225.00
3218.00
3220.00
3238.00
3230.00
3229.00
3238.00
3235.00
3248.00
3237.00
3251.00
3253.00
3257.00
3228.00
3237.00
3249.00
3266.00
3258.00
3248.00
3264.00
3267.00
3262.00
3267.00
3275.00
3279.00
3274.00
3269.00
3289.00
3274.00
3292.00
3291.00
3319.00
3319.00
3318.00
3328.00
3344.00
3331.00
3351.00
3353.00
3353.00
3340.00
3375.00
3371.00
3381.00
3366.00
3403.00
3409.00
3401.00
3397.00
3403.00
3414.00
3394.00
3412.00
3392.00
3400.00
3387.00
3381.00
3403.00
3394.00
3414.00
3418.00
3414.00
3406.00
3393.00
3391.00
3421.00
3406.00
3430.00
3413.00
4140.00
4141.00
4147.00
4212.00
4460.00
4630.00
4661.00
4644.00
4621.00
4658.00
4650.00
4652.00
4656.00
4688.00
4708.00
4695.00
4706.00
4775.00
4816.00
4827.00
4844.00
4853.00
4832.00
4892.00
4915.00
4944.00
4916.00
4948.00
4974.00
5014.00
5056.00
5104.00
5147.00
5207.00
5177.00
5215.00
5147.00
5166.00
5127.00
5121.00
5041.00
4987.00
4955.00
4939.00
4901.00
4887.00
4854.00
4868.00
4855.00
4854.00
4884.00
4864.00
4876.00
4853.00
4862.00
4837.00
4851.00
4827.00
4800.00
4789.00
4786.00
4815.00
4820.00
4798.00
4776.00
4761.00
4787.00
4774.00
4770.00
4756.00
4757.00
4720.00
4729.00
4705.00
4690.00
4650.00
4671.00
4649.00
4622.00
4637.00
4615.00
4583.00
4582.00
4579.00
4555.00
4605.00
4582.00
4576.00
4584.00
4570.00
4571.00
4579.00
4547.00
4578.00
4575.00
4515.00
4552.00
4566.00
4552.00
4550.00
4546.00
4515.00
4534.00
4537.00
4549.00
4533.00
4538.00
4551.00
4544.00
4541.00
4553.00
4539.00
4539.00
4536.00
4522.00
4518.00
4523.00
4514.00
4525.00
4508.00
4511.00
4534.00
4521.00
4520.00
4472.00
4446.00
4444.00
4439.00
4402.00
4421.00
4402.00
4419.00
4412.00
4420.00
4376.00
4382.00
4383.00
4370.00
4362.00
4368.00
4358.00
4377.00
4367.00
4367.00
4354.00
4363.00
4369.00
4339.00
4363.00
4350.00
4377.00
4362.00
4355.00
4323.00
4329.00
4338.00
4339.00
4343.00
4335.00
4358.00
4355.00
4365.00
4360.00
4357.00
4377.00
4375.00
4386.00
4378.00
4358.00
4366.00
4360.00
4385.00
4373.00
4376.00
4371.00
4385.00
4381.00
4392.00
4368.00
4399.00
4383.00
4404.00
4410.00
4404.00
4420.00
4425.00
4449.00
4442.00
4445.00
4468.00
4475.00
4478.00
4502.00
4488.00
4503.00
4511.00
4514.00
4522.00
4544.00
4561.00
4557.00
4578.00
4586.00
4596.00
4621.00
4663.00
4720.00
4735.00
4789.00
4807.00
4810.00
4837.00
4842.00
4845.00
4921.00
5098.00
5144.00
5185.00
5237.00
5178.00
5195.00
5255.00
5239.00
5260.00
5249.00
5257.00
5231.00
5288.00
5232.00
5234.00
5243.00
5257.00
5291.00
5302.00
5307.00
5344.00
5365.00
5349.00
5339.00
5531.00
5548.00
5599.00
5598.00
5587.00
5606.00
5603.00
5610.00
5593.00
5617.00
5634.00
5652.00
5652.00
5612.00
5598.00
5597.00
5636.00
5668.00
5646.00
5696.00
5695.00
5681.00
5718.00
5729.00
5733.00
5744.00
5739.00
5755.00
5744.00
5761.00
5777.00
5719.00
5692.00
5650.00
5618.00
5623.00
5589.00
5598.00
5591.00
5557.00
5556.00
5551.00
5533.00
5554.00
5514.00
5511.00
5451.00
5458.00
5410.00
5424.00
5386.00
5382.00
5370.00
5371.00
5340.00
5330.00
5305.00
5297.00
5300.00
5239.00
5271.00
5280.00
5255.00
5264.00
5232.00
5196.00
5152.00
5137.00
5148.00
5153.00
5144.00
5161.00
5142.00
5109.00
5102.00
5083.00
5133.00
5128.00
5101.00
5079.00
5065.00
5063.00
5058.00
5039.00
5028.00
5032.00
5023.00
5013.00
4987.00
5006.00
5006.00
4988.00
4996.00
4980.00
5005.00
5002.00
4986.00
4999.00
4989.00
4980.00
4989.00
4992.00
4978.00
4974.00
4966.00
4970.00
4968.00
4994.00
4971.00
4972.00
4980.00
4966.00
4970.00
4956.00
4972.00
4953.00
4922.00
4974.00
4948.00
4947.00
4930.00
4926.00
4933.00
4902.00
4918.00
4920.00
4872.00

(binary image file changed: 236 KiB → 341 KiB; not shown)


@ -1,366 +1,366 @@
1965.00
1990.00
1977.00
1991.00
1988.00
1995.00
2002.00
1982.00
1987.00
1982.00
1987.00
1989.00
1992.00
1991.00
1980.00
1985.00
1979.00
1971.00
1975.00
1967.00
1968.00
1976.00
1971.00
1972.00
1960.00
1976.00
1966.00
1970.00
1967.00
1970.00
1969.00
1968.00
1971.00
1970.00
1963.00
1968.00
1973.00
1973.00
1953.00
1965.00
1967.00
1955.00
1962.00
1965.00
1959.00
1962.00
1975.00
1955.00
1965.00
1988.00
1964.00
1962.00
1961.00
1947.00
1946.00
1949.00
1950.00
1957.00
1975.00
1943.00
1948.00
1930.00
1960.00
1947.00
1946.00
1926.00
1939.00
1945.00
1941.00
1958.00
1955.00
1959.00
1951.00
1973.00
1973.00
1962.00
1955.00
1965.00
1976.00
1986.00
1989.00
1977.00
2000.00
1981.00
2010.00
1986.00
1999.00
2018.00
2004.00
2004.00
2022.00
2021.00
2021.00
2028.00
2033.00
2031.00
2022.00
2016.00
2027.00
2043.00
2034.00
2027.00
2026.00
2022.00
1996.00
2018.00
2008.00
2002.00
1994.00
2016.00
2003.00
1993.00
1994.00
1997.00
1984.00
1998.00
1986.00
1998.00
2007.00
2003.00
2015.00
1998.00
1995.00
1987.00
1999.00
1998.00
2001.00
2006.00
1997.00
2009.00
1987.00
1999.00
1990.00
2009.00
2004.00
2010.00
2017.00
2028.00
2022.00
2034.00
2026.00
2032.00
2024.00
2030.00
2033.00
2024.00
2039.00
2030.00
2037.00
2040.00
2050.00
2057.00
2062.00
2041.00
2048.00
2048.00
2061.00
2070.00
2060.00
2057.00
2073.00
2072.00
2075.00
2081.00
2081.00
2080.00
2090.00
2080.00
2085.00
2098.00
2094.00
2096.00
2104.00
2118.00
2107.00
2112.00
2107.00
2113.00
2113.00
2102.00
2123.00
2111.00
2112.00
2118.00
2128.00
2122.00
2125.00
2114.00
2105.00
2106.00
2099.00
2095.00
2098.00
2097.00
2114.00
2111.00
2117.00
2105.00
2107.00
2114.00
2103.00
2127.00
2108.00
2113.00
2104.00
2129.00
2120.00
2115.00
2114.00
2103.00
2119.00
2127.00
2134.00
2131.00
2141.00
2155.00
2144.00
2142.00
2124.00
2128.00
2148.00
2147.00
2148.00
2160.00
2158.00
2162.00
2177.00
2178.00
2165.00
2176.00
2176.00
2172.00
2181.00
2178.00
2203.00
2213.00
2190.00
2216.00
2211.00
2212.00
2205.00
2197.00
2215.00
2208.00
2200.00
2218.00
2225.00
2242.00
2247.00
2255.00
2259.00
2268.00
2278.00
2288.00
2316.00
2316.00
2309.00
2343.00
2341.00
2344.00
2367.00
2352.00
2362.00
2368.00
2354.00
2362.00
2359.00
2364.00
2364.00
2335.00
2378.00
2345.00
2368.00
2376.00
2339.00
2367.00
2354.00
2377.00
2351.00
2362.00
2342.00
2355.00
2345.00
2343.00
2346.00
2354.00
2358.00
2350.00
2340.00
2339.00
2321.00
2329.00
2347.00
2333.00
2356.00
2350.00
2324.00
2325.00
2326.00
2347.00
2331.00
2340.00
2339.00
2334.00
2316.00
2327.00
2330.00
2335.00
2337.00
2327.00
2316.00
2319.00
2320.00
2327.00
2320.00
2344.00
2303.00
2312.00
2323.00
2325.00
2330.00
2320.00
2323.00
2313.00
2316.00
2325.00
2324.00
2333.00
2338.00
2345.00
2341.00
2341.00
2339.00
2369.00
2360.00
2383.00
2358.00
2399.00
2391.00
2407.00
2419.00
2420.00
2438.00
2444.00
2430.00
2445.00
2451.00
2443.00
2456.00
2466.00
2460.00
2458.00
2452.00
2421.00
2436.00
2457.00
2465.00
2460.00
2468.00
2463.00
2465.00
2450.00
2479.00
2451.00
2454.00
2457.00
2762.00
2766.00
2766.00
2767.00
2776.00
2839.00
2879.00
2847.00
2828.00
2835.00
2846.00
2779.00
2930.00
2872.00
2939.00
2918.00
2900.00
2976.00
2887.00
2780.00
2768.00
2806.00
2833.00
2774.00
2822.00
2821.00
2820.00
2837.00
2810.00
2851.00
2832.00
2834.00
2861.00
2859.00
2841.00
2871.00
2841.00
2857.00
2857.00
2857.00
2873.00
2849.00
2843.00
2859.00
2833.00
2863.00
2866.00
2860.00
2825.00
2854.00
2829.00
2864.00
2849.00
2847.00
2860.00
2860.00
2867.00
2861.00
2843.00
2849.00
2860.00
2862.00
2874.00
2858.00
2870.00
2855.00
2859.00
2855.00
2850.00
2866.00
2865.00
2872.00
2846.00
2848.00
2850.00
2855.00
2858.00
2870.00
2873.00
2866.00
2870.00
2842.00
2860.00
2854.00
2858.00
2861.00
2866.00
2855.00
2865.00
2846.00
2866.00
2848.00
2867.00
2855.00
2853.00
2860.00
2841.00
2856.00
2865.00
2853.00
2878.00
2867.00
2833.00
2880.00
2879.00
2871.00
2848.00
2867.00
2873.00
2868.00
2887.00
2854.00
2883.00
2882.00
2869.00
2894.00
2890.00
2883.00
2880.00
2880.00
2914.00
2902.00
2899.00
2897.00
2894.00
2875.00
2891.00
2897.00
2899.00
2910.00
2893.00
2884.00
2919.00
2883.00
2919.00
2899.00
2910.00
2905.00
2881.00
2901.00
2895.00
2904.00
2900.00
2901.00
2893.00
2895.00
2884.00
2885.00
2899.00
2899.00
2883.00
2895.00
2877.00
2883.00
2879.00
2876.00
2867.00
2868.00
2875.00
2884.00
2875.00
2887.00
2874.00
2878.00
2877.00
2862.00
2838.00
2875.00
2856.00
2868.00
2879.00
2872.00
2866.00
2875.00
2864.00
2863.00
2860.00
2862.00
2872.00
2868.00
2852.00
2869.00
2876.00
2866.00
2871.00
2883.00
2871.00
2888.00
2889.00
2873.00
2882.00
2871.00
2885.00
2885.00
2879.00
2884.00
2877.00
2870.00
2880.00
2887.00
2892.00
2874.00
2881.00
2870.00
2878.00
2869.00
2894.00
2884.00
2894.00
2884.00
2892.00
2901.00
2896.00
2906.00
2885.00
2901.00
2893.00
2902.00
2906.00
2897.00
2905.00
2914.00
2898.00
2900.00
2917.00
2898.00
2889.00
2906.00
2907.00
2881.00
2898.00
2886.00
2926.00
2919.00
2914.00
2907.00
2892.00
2898.00
2908.00
2913.00
2917.00
2909.00
2898.00
2904.00
2919.00
2915.00
2896.00
2902.00
2899.00
2895.00
2916.00
2897.00
2893.00
2901.00
2918.00
2918.00
2931.00
2924.00
2927.00
2918.00
2914.00
2915.00
2933.00
2905.00
2932.00
2921.00
2925.00
2927.00
2928.00
2931.00
2925.00
2932.00
2952.00
2927.00
2926.00
2924.00
2936.00
2944.00
2923.00
2920.00
2952.00
2944.00
2936.00
2932.00
2933.00
2926.00
2944.00
2941.00
2946.00
2954.00
2953.00
2962.00
2969.00
2958.00
2958.00
2964.00
2961.00
2961.00
2958.00
2957.00
2956.00
2952.00
2957.00
2950.00
2928.00
2939.00
2929.00
2953.00
2939.00
2945.00
2933.00
2955.00
2926.00
2942.00
2952.00
2954.00
2953.00
2944.00
2946.00
2949.00
2938.00
2943.00
2953.00
2951.00
2959.00
2942.00
2952.00
2955.00
2968.00
2960.00
2961.00
2955.00
2934.00
2951.00
2945.00
2929.00
2922.00
2910.00
2940.00
2935.00
2938.00
2945.00
2934.00
2958.00
2944.00
2940.00
2970.00
2958.00
2936.00
2942.00
2947.00
2946.00
2938.00
2968.00
2927.00
2926.00
2933.00
2922.00
2939.00
2903.00
2942.00
2919.00
2920.00
2931.00
2934.00
2926.00

(binary image file changed: 245 KiB → 220 KiB; not shown)


@ -1,366 +1,366 @@
31.58
32.10
32.01
32.15
32.23
32.22
32.19
31.86
31.39
31.07
31.17
31.52
32.12
33.29
33.62
33.68
34.00
33.68
33.60
33.47
33.44
33.32
32.91
32.61
32.60
32.44
32.65
32.77
32.98
32.72
32.84
32.89
32.89
32.79
32.82
32.97
32.90
33.02
32.96
32.95
32.93
33.03
33.15
33.93
34.53
34.57
34.61
34.66
34.99
35.25
35.11
35.26
35.17
35.08
35.37
35.71
35.67
35.76
35.08
35.22
35.27
35.40
35.32
35.49
35.09
35.25
35.33
35.55
35.85
35.85
35.68
35.70
35.55
35.41
35.86
35.60
35.69
35.96
35.64
35.93
35.91
36.15
36.42
36.34
36.97
36.89
37.32
37.19
37.20
37.38
37.18
36.89
37.10
37.11
36.94
36.28
36.14
35.90
36.47
36.15
36.10
35.66
35.76
35.68
35.62
35.76
35.73
36.09
35.80
36.14
35.81
35.90
35.81
35.80
35.69
35.83
35.72
35.46
35.52
35.61
35.68
35.36
35.24
34.97
34.72
34.48
34.37
34.27
34.23
34.09
33.75
33.88
33.88
33.78
33.72
33.63
33.55
32.99
33.07
32.94
33.20
33.11
32.78
32.86
32.83
32.86
32.72
33.04
32.79
32.90
32.77
32.68
32.81
32.69
32.56
32.36
32.23
32.05
32.04
32.00
31.85
31.64
31.56
31.43
31.14
31.17
31.16
31.14
30.91
30.57
30.01
29.78
29.40
29.10
28.77
28.41
12.45
12.53
12.49
12.44
12.47
12.54
12.34
12.51
12.44
12.45
12.53
12.46
12.41
12.38
12.31
12.10
12.15
12.11
12.27
12.23
12.11
12.05
11.98
12.00
11.92
11.77
11.78
11.62
11.63
11.74
11.91
12.11
12.17
12.14
12.10
12.11
12.03
12.14
12.21
12.31
12.36
12.36
12.42
12.34
12.27
12.27
12.25
12.23
12.16
12.18
12.04
12.16
12.27
12.39
12.55
12.62
12.70
12.92
13.34
13.94
14.46
14.23
14.11
14.33
14.61
14.67
14.50
14.56
14.72
14.90
14.99
15.04
15.02
14.97
14.77
14.57
14.62
14.75
14.97
15.23
15.22
15.08
15.14
15.19
15.29
15.43
15.47
15.54
15.66
15.61
15.64
15.66
15.64
15.69
15.76
15.74
15.77
15.70
15.73
15.71
15.75
15.76
15.63
15.65
15.66
15.70
15.80
15.76
15.74
15.75
15.77
15.76
15.91
16.02
16.14
16.31
16.37
16.32
16.38
16.33
16.48
16.79
17.05
17.35
17.60
17.76
17.90
18.25
18.94
19.84
20.13
20.14
20.37
20.82
21.90
23.16
22.90
22.21
22.35
22.86
23.10
22.84
22.78
23.01
22.81
22.48
22.37
22.33
22.51
22.75
22.71
22.30
22.24
22.19
21.90
21.68
21.36
20.93
20.89
21.21
21.64
21.49
21.55
21.78
21.61
21.61
21.53
21.50
21.52
21.27
21.24
21.32
21.35
21.21
21.17
21.16
21.18
21.25
21.35
21.46
21.40
21.35
21.47
21.60
21.84
22.05
22.13
22.06
22.32
22.78
22.79
22.80
22.89
22.87
23.09
22.90
22.91
23.10
23.03
22.94
22.92
22.94
22.91
23.14
23.17
23.30
23.20
23.13
23.27
23.42
23.59
23.60
23.65
23.77
23.84
23.81
24.16
24.10
24.05
24.03
24.12
24.24
24.28
24.38
24.51
24.61
24.91
25.23
25.54
25.87
26.15
26.32
26.43
26.48
26.77
27.32
27.76
27.94
27.68
27.59
27.86
27.03
25.70
25.57
26.07
26.44
26.66
27.00
27.37
27.90
28.35
28.58
28.42
28.47
28.39
28.60
28.57
28.72
28.74
28.95
29.75
30.46
30.72
30.65
30.66
30.90
31.06
31.17
31.30
31.33
31.55
31.73
31.79
32.07
32.50
33.08
33.49
33.61
33.43
33.47
33.78
33.83
33.91
33.69
33.79
34.32
34.69
35.13
35.33
35.50
35.45
35.58
35.63
35.93
36.52
37.11
37.38
37.35
37.43
37.55
37.73
37.65
37.45
37.21
37.07
37.05
37.03
36.93
36.97
36.72
36.51
36.41
36.36
36.30
36.43
36.69
36.73
36.90
37.04
37.16
37.17
37.18
37.17
37.14
37.13
36.75
37.03
36.96
36.83
36.79
36.77
36.56
36.38
36.30
36.27
36.40
36.44
36.47
36.48
36.51
36.77
36.93
36.94
36.96
36.68
36.46
36.29
36.12
35.81
36.10
36.26
36.46
36.42
36.27
36.26
36.12
36.13
36.20
36.00
35.89
35.53
35.59
35.11
35.19
34.76
34.97
34.97
34.79
34.37
34.16
34.00
34.04
33.85
33.18
32.62
32.56
32.68
32.98
32.67
32.64
32.65
32.82
32.96
32.81
32.29
32.13
32.14
31.86
31.52
31.11
30.81
30.50
30.28
29.84
29.39
29.05
29.59
29.62
29.73
29.74
29.38
29.14
29.02
28.48
28.46
28.32
28.63
28.67
28.72
28.62
28.69
28.79
28.96
29.26
29.35
29.54
29.24
29.34
29.18
29.37
29.35
29.38
29.15
29.13
29.17
29.04
29.07
29.17
29.19
29.19
28.12
28.13
28.02
28.01
28.10
28.02
27.71
27.48
27.34
27.10
26.76
26.21
26.45
26.45
26.22
26.34
26.25
26.23
26.21
26.10
25.79
25.05
24.65
24.73
24.94
25.28
25.24
24.98
24.73
24.94
24.90
24.75
24.43
24.32
24.17
23.78
23.04
22.71
22.70
22.69
22.67
22.73
22.67
22.67
22.87
23.02
23.22
23.07
23.12
22.68
22.15
21.38
21.19
20.25
19.62
19.91
19.87
19.82
19.71
19.02
18.16
17.99
17.38
16.52
16.61
16.98
17.08
17.16
17.69
18.20
18.10
17.85
17.22
16.71
16.53
16.06
15.39
15.24
14.89
15.01
15.31
15.38
15.07
15.16
14.72
14.33
14.18
14.06
14.03
14.39
14.77
14.92
15.12
15.15
15.16
15.21
15.34
15.30
15.40
15.20
15.30
15.08
14.77
14.32
14.24
14.24
14.17
14.03
14.02
13.97
14.13
14.30
14.20
14.24
14.33
14.32
14.33
14.48
14.71
14.80
14.53
14.69
14.76
15.32
15.09

(binary image file changed: 244 KiB → 226 KiB; not shown)


@ -1,366 +1,366 @@
30.99
31.25
31.31
31.39
31.40
31.60
31.43
31.23
30.78
30.59
30.58
30.94
31.45
32.41
32.87
33.06
33.34
33.18
32.91
32.78
32.85
32.68
32.40
32.18
31.91
32.00
31.93
32.15
32.45
32.23
32.36
32.43
32.42
32.37
32.33
32.54
32.37
32.33
32.40
32.39
32.38
32.45
32.59
33.53
34.00
34.10
34.06
34.14
34.37
34.72
34.62
34.66
34.56
34.56
34.84
35.14
35.24
35.16
34.56
34.66
34.77
34.93
34.94
34.88
34.52
34.70
34.85
34.98
35.25
35.31
35.07
35.03
34.92
34.83
35.32
34.89
35.11
35.09
34.88
35.20
35.30
35.58
35.78
35.88
36.45
36.31
36.81
36.56
36.73
36.73
36.30
36.19
36.40
36.33
36.24
35.64
35.57
35.30
35.70
35.53
35.48
35.09
35.15
35.07
35.29
35.13
35.09
35.52
35.21
35.61
35.25
35.37
35.43
35.25
35.10
35.26
35.19
35.00
35.00
35.06
35.06
34.96
34.66
34.48
34.15
33.87
33.81
33.73
33.65
33.52
33.31
33.40
33.36
33.30
33.26
33.07
33.04
32.52
32.37
32.26
32.53
32.47
32.25
32.25
32.26
32.27
32.21
32.40
32.18
32.30
32.21
32.15
32.25
32.31
32.08
31.80
31.69
31.67
31.54
31.45
31.42
31.13
31.12
30.86
30.53
30.59
30.69
30.54
30.38
29.99
29.62
29.36
28.86
28.57
28.19
27.86
27.41
26.48
25.17
24.99
25.60
12.11
12.16
12.16
12.01
12.16
12.13
11.89
12.07
11.98
12.09
12.15
12.09
12.00
11.99
11.85
11.69
11.83
11.78
11.92
11.82
11.78
11.77
11.64
11.67
11.59
11.42
11.45
11.26
11.21
11.40
11.52
11.69
11.79
11.70
11.77
11.71
11.68
11.72
11.78
11.90
11.92
11.93
11.96
11.84
11.83
11.86
11.87
11.86
11.76
11.78
11.62
11.76
11.90
12.00
12.15
12.23
12.32
12.58
12.95
13.54
13.95
13.84
13.70
13.94
14.18
14.30
14.13
14.19
14.31
14.46
14.58
14.61
14.67
14.55
14.38
14.15
14.17
14.31
14.59
14.75
14.68
14.60
14.68
14.78
14.85
14.90
14.98
15.14
15.19
15.08
15.21
15.20
15.20
15.27
15.34
15.29
15.35
15.26
15.33
15.27
15.30
15.34
15.22
15.28
15.24
15.23
15.36
15.29
15.31
15.34
15.35
15.35
15.48
15.66
15.76
15.87
15.93
15.93
15.95
15.94
16.13
16.34
16.67
16.92
17.16
17.31
17.48
17.82
18.47
19.29
19.56
19.64
19.82
20.30
21.41
22.32
22.27
21.60
21.81
22.34
22.56
22.33
22.30
22.54
22.30
22.04
21.90
21.83
22.02
22.19
22.20
21.86
21.76
21.58
21.48
21.16
20.82
20.45
20.41
20.74
21.14
21.00
21.07
21.28
21.18
21.13
21.00
20.99
21.03
20.77
20.82
20.86
20.93
20.74
20.84
20.61
20.71
20.87
20.89
20.95
20.97
20.94
20.93
21.22
21.38
21.59
21.63
21.60
21.84
22.33
22.31
22.30
22.32
22.50
22.73
22.64
22.61
22.72
22.74
22.54
22.53
22.52
22.53
22.69
22.84
22.87
22.81
22.68
22.85
22.96
23.13
23.15
23.18
23.28
23.39
23.44
23.68
23.61
23.55
23.55
23.63
23.79
23.86
24.03
24.03
24.07
24.55
24.76
25.13
25.43
25.64
25.92
26.20
26.39
26.81
27.24
27.76
28.00
27.81
27.88
27.97
28.18
28.21
28.21
28.41
28.59
29.21
29.98
30.09
30.27
30.12
30.35
30.50
30.66
30.87
30.82
30.96
31.17
31.25
31.43
31.87
32.45
33.06
33.08
32.88
32.97
33.25
33.29
33.44
33.25
33.25
33.73
34.15
34.58
34.77
35.01
34.85
35.15
35.20
35.48
35.85
36.50
36.87
36.82
36.90
37.16
37.26
37.16
36.90
36.76
36.63
36.53
36.66
36.50
36.48
36.25
36.13
35.96
35.86
35.90
36.02
36.23
36.27
36.35
36.57
36.64
36.71
36.68
36.67
36.66
36.74
36.18
36.62
36.54
36.46
36.33
36.26
36.06
35.91
35.78
35.76
35.85
35.93
35.97
36.02
36.05
36.32
36.44
36.39
36.43
36.25
36.01
35.79
35.61
35.27
35.62
35.80
36.04
35.96
35.83
35.78
35.66
35.62
35.65
35.47
35.42
35.01
35.16
34.60
34.72
34.33
34.42
34.54
34.36
33.92
33.81
33.60
33.59
33.21
32.70
31.97
32.03
32.19
32.32
32.13
32.16
32.30
32.30
32.45
32.32
31.83
31.72
31.47
31.37
30.98
30.61
30.38
30.08
29.72
29.41
29.31
28.75
29.09
29.13
29.20
29.25
28.91
28.67
28.54
28.04
27.99
27.78
28.23
28.31
28.25
28.11
28.22
28.41
28.54
28.81
28.93
29.06
28.90
28.91
28.77
28.96
28.93
28.94
28.81
28.75
28.74
28.59
28.66
28.88
28.79
28.85
26.02
26.04
26.34
26.78
27.28
27.37
27.22
27.07
27.30
27.65
27.60
27.47
27.46
27.56
27.50
27.15
27.02
26.86
26.65
26.22
25.62
25.76
25.82
25.70
25.95
25.73
25.72
25.64
25.59
25.41
24.39
23.96
24.17
24.43
24.74
24.76
24.45
24.21
24.37
24.41
24.18
23.90
23.90
23.65
23.18
22.39
22.11
22.01
22.25
22.18
22.26
22.09
22.11
22.30
22.45
22.65
22.51
22.51
22.11
21.39
20.70
20.54
19.59
18.94
19.29
19.18
19.21
19.17
18.39
17.87
17.37
16.71
16.14
16.08
16.37
16.50
16.61
17.18
17.83
17.45
17.36
16.72
16.22
16.12
15.62
15.11
14.74
14.50
14.61
14.96
14.96
14.75
14.78
14.23
13.91
13.59
13.63
13.56
13.71
14.44
14.41
14.81
14.76
14.93
14.89
15.05
14.86
15.07
14.85
14.78
14.37
14.38
13.91
13.75
13.93
13.75
13.45
13.50
13.56
13.81
13.86
13.92
13.90
13.93
13.71
13.85
13.99
14.10
14.21
14.15
14.31
14.42
14.51
14.75

(binary image file changed: 245 KiB → 228 KiB; not shown)


@ -1,366 +1,366 @@
32.41
32.66
32.70
32.82
33.30
33.22
32.88
32.57
32.18
32.00
31.98
32.30
33.00
34.25
34.82
34.84
35.00
34.94
34.80
34.68
34.46
34.39
34.06
33.75
33.40
33.21
33.44
33.52
33.82
33.68
33.65
33.77
33.74
33.69
33.67
33.77
33.72
33.80
33.81
33.66
33.73
33.77
34.02
34.74
35.42
35.58
35.67
35.63
35.98
36.14
36.10
36.18
36.06
36.13
36.12
36.50
36.68
36.56
36.14
36.24
36.32
36.44
36.47
36.46
36.33
36.72
36.95
36.17
36.41
36.34
36.35
36.17
36.18
37.09
37.49
37.12
37.44
37.41
37.57
37.55
37.47
37.78
37.86
38.05
38.30
38.52
38.74
38.46
38.54
38.66
38.75
38.46
38.39
38.20
38.07
37.58
37.44
36.92
37.54
37.11
37.04
36.75
36.50
36.55
36.60
36.46
36.54
36.82
36.64
36.71
36.50
36.59
36.61
36.50
36.42
36.49
36.41
36.33
36.26
36.31
36.19
36.17
35.88
35.81
35.46
35.17
35.13
35.06
34.91
34.83
34.62
34.56
34.46
34.38
34.32
34.23
34.20
33.97
33.77
33.75
33.97
33.85
33.76
33.76
33.27
33.72
33.78
33.77
33.78
33.78
33.64
33.63
33.62
33.40
33.31
33.20
32.92
32.92
32.75
32.80
32.72
32.56
32.43
32.19
32.13
32.06
32.13
32.04
31.82
31.57
31.05
30.77
30.37
30.05
29.63
29.35
28.74
28.04
26.94
26.81
27.32
27.40
27.51
27.60
27.87
28.42
28.91
29.05
29.07
29.18
29.14
29.15
29.41
29.49
29.51
29.79
30.55
31.45
31.52
31.58
31.55
31.74
31.90
31.93
32.05
32.16
32.28
32.53
32.64
32.89
33.32
33.93
34.34
34.51
34.46
34.52
34.57
34.68
34.64
34.54
34.64
35.00
35.42
35.90
36.09
36.23
36.27
36.28
36.34
36.68
37.30
38.09
38.34
38.34
38.22
38.39
38.45
38.40
38.23
37.96
37.85
37.74
37.71
37.65
37.42
37.35
37.19
37.03
36.98
36.93
37.09
37.19
37.31
37.31
37.54
37.57
37.81
37.76
37.82
37.86
37.81
37.72
37.59
37.56
37.49
37.37
37.35
37.11
36.97
36.90
36.92
36.93
36.97
36.97
37.18
37.21
37.29
37.51
37.56
37.53
37.42
37.25
36.89
36.83
36.66
36.78
36.79
36.95
37.07
36.94
36.87
36.87
36.81
36.69
36.59
36.43
36.15
35.99
35.75
35.60
35.42
35.47
35.43
35.31
35.02
34.86
34.69
34.45
34.20
33.67
33.18
33.07
33.39
33.52
33.21
33.21
33.15
33.40
33.43
33.36
33.19
32.79
32.76
32.59
32.20
31.68
31.26
31.06
30.65
30.27
29.93
29.74
29.86
30.08
30.17
30.01
29.93
29.80
29.43
29.05
28.89
28.85
28.89
28.99
28.92
28.87
28.98
29.17
29.48
29.74
29.83
29.81
29.66
29.62
29.60
29.72
29.67
29.64
29.55
29.42
29.41
29.49
29.54
29.48
29.46
29.51
12.73
12.61
12.63
12.58
12.65
12.67
12.71
12.76
12.68
12.72
12.67
12.65
12.64
12.64
12.56
12.37
12.32
12.40
12.38
12.24
12.30
12.23
12.15
12.19
12.10
11.98
11.91
11.80
11.78
11.84
12.06
12.29
12.37
12.35
12.39
12.36
12.31
12.32
12.43
12.54
12.64
12.67
12.71
12.68
12.63
12.61
12.53
12.52
12.45
12.41
12.36
12.39
12.51
12.69
12.80
12.92
13.02
13.27
13.77
14.51
14.78
14.52
14.33
14.65
14.75
14.90
14.79
14.82
15.03
15.27
15.45
15.46
15.41
15.26
15.13
14.87
14.84
15.02
15.31
15.64
15.53
15.46
15.50
15.59
15.64
15.70
15.88
15.96
16.01
15.90
15.94
15.91
15.96
16.01
16.02
16.00
16.04
16.03
16.00
15.97
16.06
16.09
16.04
15.98
15.99
16.04
16.03
16.02
16.01
15.99
15.99
16.07
16.22
16.30
16.45
16.62
16.65
16.62
16.68
16.66
16.80
17.14
17.52
17.82
17.96
18.13
18.21
18.75
19.46
20.39
20.52
20.52
20.73
21.48
23.00
23.92
23.35
22.31
22.64
23.71
23.67
23.11
23.18
23.38
23.09
22.88
22.72
22.82
22.77
23.02
23.03
22.73
22.56
22.62
22.42
22.12
21.64
21.12
21.04
21.47
22.02
21.98
21.83
22.00
21.88
21.81
21.80
21.93
21.99
21.58
21.52
21.49
21.60
21.57
21.65
21.60
21.45
21.46
21.66
21.63
21.70
21.73
21.77
21.87
22.14
22.37
22.44
22.43
22.74
23.29
23.21
23.21
23.29
23.56
23.69
23.51
23.49
23.66
23.60
23.37
23.34
23.29
23.39
23.60
23.65
23.68
23.57
23.59
23.72
23.94
24.02
24.04
24.07
24.17
24.29
24.31
24.52
24.53
24.39
24.29
24.38
24.54
24.59
24.82
24.92
25.01
25.34
25.61
25.93
26.28
26.57
26.68
26.67
26.82
27.16
27.73
28.24
28.40
28.18
27.81
28.17
28.59
28.46
28.26
28.54
28.52
28.15
27.80
27.55
27.44
27.50
27.10
26.59
26.67
26.83
26.44
26.55
26.40
26.49
26.44
26.45
26.16
25.42
25.21
25.13
25.31
25.62
25.69
25.35
25.11
25.35
25.22
25.01
24.85
24.63
24.54
24.16
23.36
22.93
23.04
22.92
23.01
23.22
23.23
23.13
23.17
23.48
23.73
23.56
23.57
23.25
22.54
21.87
21.71
21.00
20.27
20.40
20.32
20.25
20.17
19.40
18.69
18.43
17.51
17.09
16.98
17.48
17.53
17.64
18.19
18.70
18.66
18.18
17.64
16.84
16.94
16.45
15.98
15.54
15.20
15.27
15.76
15.70
15.46
15.51
15.08
14.71
14.56
14.38
14.40
14.80
15.17
15.38
15.54
15.54
15.60
16.55
15.72
15.76
15.80
15.65
15.74
15.53
15.10
14.67
14.54
14.61
14.47
14.25
14.27
14.33
14.42
14.53
14.51
14.58
14.53
14.44
14.52
14.79
15.01
14.97
14.91
14.96
15.08
15.49
15.63


@ -1,4 +1,4 @@
20201020
20200820
20200523
20191120
20230122
20221122
20220825
20220221


@ -1,3 +1,4 @@
# https://www.bilibili.com/video/BV1ar4y137GD/?spm_id_from=333.337.search-card.all.click&vd_source=311a862c74a77082f872d2e1ab5d1523
import numpy as np
import matplotlib.pyplot as plt
@ -16,15 +17,15 @@ def plotf(loss):
def main():
x = 15
lr = 0.1
lr = 1.1
steps = 400
loss = []
for i in range(steps):
x = x-lr*df(x)
loss.append(f(x))
print(loss[i])
# y = f(x)
# print(y)
# print(loss[i])
y = f(x)
print(y)
plotf(loss)
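
The lr change from 0.1 to 1.1 in this hunk turns the demo into a divergence example. A minimal self-contained illustration of why a too-large step diverges, using f(x) = x**2 rather than the f from this script: gradient descent iterates x ← x − lr·2x = (1 − 2·lr)·x, which diverges as soon as |1 − 2·lr| > 1, i.e. lr > 1.

x = 15.0
lr = 1.1
for i in range(5):
    x = x - lr * 2 * x  # df(x) = 2x for f(x) = x**2
    print(x)            # |x| grows by a factor of 1.2 each step: the iteration diverges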