
Optimization Algorithms: Gradient Descent


There is nothing mysterious here; the key idea is the mathematical principle of the gradient. The gradient points in the direction of steepest ascent, so repeatedly stepping against it, x ← x − η·∇f(x) with learning rate η, walks the iterate downhill toward a (local) minimum.
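To make the update rule concrete, here is a minimal sketch that traces two iterations by hand for f(x) = (x − 3)², the same objective used in the 2D code below; the values are worked out purely for illustration.

# Two gradient-descent iterations by hand for f(x) = (x - 3)^2,
# whose gradient is f'(x) = 2(x - 3).
eta = 0.1   # learning rate
x = 0.0     # starting point

# Iteration 1: gradient at x=0 is 2*(0-3) = -6
x = x - eta * 2 * (x - 3)   # x = 0 - 0.1*(-6) = 0.6

# Iteration 2: gradient at x=0.6 is 2*(0.6-3) = -4.8
x = x - eta * 2 * (x - 3)   # x = 0.6 - 0.1*(-4.8) = 1.08

print(x)  # 1.08 -- each step moves x closer to the minimum at x = 3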

Gradient Descent: 2D Code

import numpy as np
import matplotlib.pyplot as plt

# Objective function: f(x) = (x - 3)^2, minimized at x = 3
def f(x):
    return (x - 3)**2

# Gradient of the objective: f'(x) = 2(x - 3)
def grad_f(x):
    return 2 * (x - 3)

# Gradient descent: repeatedly step against the gradient,
# recording every iterate for plotting
def gradient_descent(starting_point, learning_rate, num_iterations):
    x = starting_point
    x_history = [x]
    
    for _ in range(num_iterations):
        gradient = grad_f(x)
        x = x - learning_rate * gradient
        x_history.append(x)
    
    return x, x_history

starting_point = 0   # initial point
learning_rate = 0.1  # learning rate (step size)
num_iterations = 20  # number of iterations

optimal_x, x_history = gradient_descent(starting_point, learning_rate, num_iterations)
# Plot the objective function and the sequence of iterates
x_values = np.linspace(-1, 5, 400)
y_values = f(x_values)

plt.figure(figsize=(10, 6))
plt.plot(x_values, y_values, label='Objective Function $(x-3)^2$', color='blue')
plt.scatter(x_history, [f(x) for x in x_history], color='red', label='Gradient Descent Steps')
plt.plot(x_history, [f(x) for x in x_history], linestyle='--', color='red')
plt.axvline(x=3, color='green', linestyle='--', label='Minimum x=3')
plt.xlabel('x')
plt.ylabel('f(x)')
plt.title('Gradient Descent Visualization')
plt.legend()
plt.grid()
plt.show()
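The learning rate of 0.1 converges smoothly here, but the step size matters. For this particular objective the update multiplies the error x − 3 by (1 − 2η) each iteration, so convergence requires |1 − 2η| < 1, i.e. 0 < η < 1. A minimal sketch reusing the gradient_descent function defined above, with the comparison values chosen purely for illustration:

# Compare step sizes; reuses gradient_descent defined above.
# The error x - 3 shrinks by the factor (1 - 2*lr) each iteration,
# so 0 < lr < 1 is required for convergence on this objective.
for lr in (0.05, 0.5, 0.95, 1.1):  # illustrative values
    x_final, _ = gradient_descent(starting_point=0, learning_rate=lr, num_iterations=20)
    print(f"lr={lr}: final x = {x_final:.4f}")
# lr=0.05 and lr=0.95 converge toward 3, lr=0.5 lands exactly on 3
# after one step, and lr=1.1 diverges.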

Gradient Descent: 3D Code

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection (only needed on older Matplotlib)

# Objective function: f(x, y) = (x - 2)^2 + (y - 3)^2, minimized at (2, 3)
def f(x, y):
    return (x - 2)**2 + (y - 3)**2

# Gradient of the objective: partial derivatives with respect to x and y
def grad_f(x, y):
    df_dx = 2 * (x - 2)
    df_dy = 2 * (y - 3)
    return df_dx, df_dy

# Gradient descent in two variables: update each coordinate against its partial derivative
def gradient_descent(starting_point, learning_rate, num_iterations):
    x, y = starting_point
    x_history, y_history = [x], [y]
    
    for _ in range(num_iterations):
        df_dx, df_dy = grad_f(x, y)
        x = x - learning_rate * df_dx
        y = y - learning_rate * df_dy
        x_history.append(x)
        y_history.append(y)
    
    return (x, y), x_history, y_history

# Parameter settings
starting_point = (0, 0)  # initial point
learning_rate = 0.1      # learning rate (step size)
num_iterations = 30      # number of iterations

# Run gradient descent
optimal_point, x_history, y_history = gradient_descent(starting_point, learning_rate, num_iterations)

# Build a grid for the 3D surface plot of the objective
x = np.linspace(0, 4, 100)
y = np.linspace(0, 6, 100)
X, Y = np.meshgrid(x, y)
Z = f(X, Y)

fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111, projection='3d')

# Draw the objective surface
ax.plot_surface(X, Y, Z, cmap='viridis', alpha=0.7)

# Draw the gradient-descent path on the surface
ax.plot(x_history, y_history, f(np.array(x_history), np.array(y_history)), color='red', marker='o')

# Mark the location of the minimum
ax.scatter(2, 3, f(2, 3), color='green', s=100, label='Minimum (2, 3)')

ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('f(X, Y)')
ax.set_title('3D Gradient Descent Visualization')
ax.legend()

plt.show()
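The per-coordinate updates above generalize directly: treating the point as a NumPy array gives one implementation that works in any number of dimensions. A minimal vectorized sketch under that assumption; the function name gradient_descent_nd is illustrative, not from the original code.

import numpy as np

# Vectorized gradient descent: the point and gradient are arrays,
# so the same loop handles 1D, 2D, or higher-dimensional objectives.
def gradient_descent_nd(grad, start, learning_rate, num_iterations):
    x = np.asarray(start, dtype=float)
    history = [x.copy()]
    for _ in range(num_iterations):
        x = x - learning_rate * grad(x)
        history.append(x.copy())
    return x, history

# Same objective as above: f(x, y) = (x - 2)^2 + (y - 3)^2
grad = lambda p: 2 * (p - np.array([2.0, 3.0]))

optimum, _ = gradient_descent_nd(grad, start=(0, 0), learning_rate=0.1, num_iterations=30)
print(optimum)  # approaches [2. 3.]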

