Optimization in Python

Published

November 11, 2022

import numpy as np
from scipy.optimize import minimize
 
# Objective function: sum of squares of the three variables.
def func(x, sign=1.0):
    """Return sign * (x[0]^2 + x[1]^2 + x[2]^2).

    scipy.optimize.minimize minimizes by default; passing sign=-1
    (via `args` in the minimize call) turns this into a maximization
    problem.
    """
    return sign * (x[0] * x[0] + x[1] * x[1] + x[2] * x[2])
 
# Analytic gradient of `func`: d/dx_i of sign * sum(x_i^2) = sign * 2*x_i.
def func_deriv(x, sign=1):
    """Return the Jacobian of `func` at x as a numpy array of length 3."""
    return np.array([sign * 2 * x[i] for i in range(3)])
 
# Equality constraints for SLSQP.
# Constraints are a sequence of dicts with keys 'type', 'fun' and 'jac'
# ('fun' is the constraint residual, 'jac' its gradient w.r.t. x):
#   x0 + 2*x1 - x2 = 4
#   x0 -   x1 - x2 = -2
cons = (
    {
        'type': 'eq',
        'fun': lambda x: np.array([x[0] + 2 * x[1] - x[2] - 4]),
        'jac': lambda x: np.array([1, 2, -1]),
    },
    {
        'type': 'eq',
        'fun': lambda x: np.array([x[0] - x[1] - x[2] + 2]),
        'jac': lambda x: np.array([1, -1, -1]),
    },
)
 
# Initial guess for (x0, x1, x2).
x0 = np.array([-1.0, 1.0, 1.0])

# Solve with SLSQP. `args` is forwarded to both the objective and its
# gradient: args=(1,) minimizes; args=(-1,) would maximize instead.
res = minimize(
    func,
    x0,
    args=(1,),
    jac=func_deriv,
    method='SLSQP',
    options={'disp': True},
    constraints=cons,
)
print(res.x)
Optimization terminated successfully    (Exit mode 0)
            Current function value: 4.000000000000002
            Iterations: 2
            Function evaluations: 2
            Gradient evaluations: 2
[-2.22044605e-16  2.00000000e+00 -6.66133815e-16]
from scipy.optimize import minimize
from numpy.random import rand

# Objective in two unknowns: f(x) = x0^2 + x1^2.
def objective(x):
    """Return the sum of squares of the first two components of x.

    Note: the original mixed exponent literals (`** 2.0` and `** 2`),
    which needlessly promoted integer inputs to float; both terms now
    use the integer exponent.
    """
    return x[0] ** 2 + x[1] ** 2

# Search interval for each variable.
r_min, r_max = -5.0, 5.0

# Random starting point in [r_min, r_max] for the TWO unknowns.
# Bug fix: this was rand(3), but `objective` only reads x[0] and x[1],
# so the third coordinate was never optimized — the pasted output shows
# it carried through unchanged (x[2] == -3.83671977).
pt = r_min + rand(2) * (r_max - r_min)

# Minimize with the quasi-Newton L-BFGS-B algorithm.
result = minimize(objective, pt, method='L-BFGS-B')

print("Status: %s" %result['message'])
print("Total Evaluation: %d" %result['nfev'])

# Evaluate the objective at the solution found.
solution = result['x']
evaluation = objective(solution)
print("Solution:f(%s) = %.5f" %(solution,evaluation))
print(result)
Status: CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL
Total Evaluation: 12
Solution:f([ 3.61107754e-08  7.45137776e-08 -3.83671977e+00]) = 0.00000
      fun: 6.85629114893573e-15
 hess_inv: <3x3 LbfgsInvHessProduct with dtype=float64>
      jac: array([8.22215507e-08, 1.59027555e-07, 0.00000000e+00])
  message: 'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL'
     nfev: 12
      nit: 2
     njev: 3
   status: 0
  success: True
        x: array([ 3.61107754e-08,  7.45137776e-08, -3.83671977e+00])
pt
array([ 0.49348315,  2.45112577, -3.83671977])
def object