Convex Optimizer

I am using Python 2.7 and I need to find the maximum of a multidimensional scalar function.

In other words, I have this function:

def myFun(a,b,c,d,e,f):
    # complex calculation that takes about 30 seconds
    return res # res is a float

This function is NOT convex.

I can specify the minimum and maximum possible values for each of the arguments a, b, c, d, e and f. I need to find the combination of arguments that approximately maximizes myFun, and I can give the optimizer a decent starting point.

I tried a brute-force search over a coarse grid, but given how long each evaluation of my function takes, that is not viable: even 10 grid points per argument means 10^6 evaluations, which at 30 seconds each is roughly a year of computation.
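To make the setup concrete, here is roughly what I have (a sketch: the stand-in calculation, bounds and starting point below are placeholders, and the wrapper reflects that most optimizers want the six arguments packed into a single vector):

import numpy as np

def myFun(a, b, c, d, e, f):
    # stand-in for the real calculation (~30 seconds per call)
    return -((a - 1)**2 + (b - 2)**2 + c**2 + d**2 + e**2 + f**2)

# most optimizers work on one parameter vector, so wrap myFun;
# negating turns "find the maximum" into "find the minimum"
def neg_myFun(x):
    return -myFun(*x)

# placeholder (min, max) pairs, one per argument a..f
bounds = [(0, 2), (0, 4), (-1, 1), (-1, 1), (-1, 1), (-1, 1)]
x0 = np.zeros(6)  # stands in for the decent starting point I have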

I looked into the scipy package and saw, in particular, the function scipy.optimize.fmin_slsqp. Would that be appropriate for my problem? Or maybe scipy.optimize.fmin()? Is there any other function or module suitable for this?

For convex problems, the usual recommendations are CVXPY (http://www.cvxpy.org/en/latest) and, at a lower level, CVXOPT (which CVXPY can use as a backend solver). But both require the problem to actually be convex, and since your function is not convex, neither applies here.
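As a quick illustration of that point (a sketch, not from the original answer): CVXPY's DCP analysis refuses a non-convex problem before any solver even runs, for example when asked to maximize a convex function:

import cvxpy as cp
from cvxpy.error import DCPError

x = cp.Variable()
# maximizing a convex function (x**2) is a non-convex problem,
# so CVXPY rejects it at the modeling stage
prob = cp.Problem(cp.Maximize(cp.square(x)), [x >= 0, x <= 1])
try:
    prob.solve()
except DCPError as err:
    print("rejected as non-convex:", err)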

For an overview of nonlinear solvers available from Python, see this question: https://scicomp.stackexchange.com/questions/83/is-there-a-high-quality-nonlinear-programming-solver-for-python. It lists quite a few options, both local and global.

My suggestion is mystic (https://pypi.python.org/pypi/mystic). It provides global optimizers (such as differential evolution) as well as local ones (such as Powell's method), all of which respect bounds on the parameters, and it lets you impose nonlinear constraints either as hard constraints or as soft penalties generated from symbolic expressions. Unlike scipy.optimize, mystic is built around constrained global optimization, and (as noted below) its evaluations can be run in parallel, which matters a great deal when a single call takes 30 seconds.

mystic ships with roughly 100 examples; here is a small constrained maximization problem solved with mystic:

'''
    Maximize: f = 2*x[0]*x[1] + 2*x[0] - x[0]**2 - 2*x[1]**2

    Subject to:    x[0]**3 - x[1] == 0
                             x[1] >= 1
'''

The solution, with the constraints written symbolically:

def objective(x):
    return 2*x[0]*x[1] + 2*x[0] - x[0]**2 - 2*x[1]**2

# the equality constraint, written symbolically (x0, x1 stand for x[0], x[1])
equations = """
x0**3 - x1 == 0.0
"""
# the inequality x[1] >= 1 enters through the lower bound on x[1]
bounds = [(None, None), (1.0, None)]

# known solution of the constrained problem (for the inverted objective):
xs = [1,1]; ys = -1.0

from mystic.symbolic import generate_conditions, generate_penalty
# penalty: candidate solutions violating the equation get their cost
# inflated (k sets how severely)
pf = generate_penalty(generate_conditions(equations), k=1e4)
from mystic.symbolic import generate_constraint, generate_solvers, solve
# constraint: candidate solutions are transformed so the equation holds exactly
cf = generate_constraint(generate_solvers(solve(equations)))

# inverted objective, used in solving for the maximum
_objective = lambda x: -objective(x)


if __name__ == '__main__':

  from mystic.solvers import diffev2, fmin_powell
  from mystic.math import almostEqual

  result = diffev2(_objective, x0=bounds, bounds=bounds, constraints=cf, penalty=pf, npop=40, ftol=1e-8, gtol=100, disp=False, full_output=True)
  assert almostEqual(result[0], xs, rel=2e-2)
  assert almostEqual(result[1], ys, rel=2e-2)

  result = fmin_powell(_objective, x0=[-1.0,1.0], bounds=bounds, constraints=cf, penalty=pf, disp=False, full_output=True)
  assert almostEqual(result[0], xs, rel=2e-2)
  assert almostEqual(result[1], ys, rel=2e-2)

And here is a curve-fitting example:

"""
    Fit linear and quadratic polynomial to noisy data:
               y(x) ~ a + b * x   --or--   y(x) ~ a + b * x + c * x**2
    where:
               0 <= x <= 4
               y(x) = y0(x) + yn
               y0(x) = 1.5 * exp(-0.2 * x) + 0.3
               yn = 0.1 * Normal(0,1)
"""

The solution:

from numpy import polyfit, poly1d, linspace, exp
from numpy.random import normal
from mystic.math import polyeval
from mystic import reduced

# Create clean data.
x = linspace(0, 4.0, 100)
y0 = 1.5 * exp(-0.2 * x) + 0.3

# Add a bit of noise.
noise = 0.1 * normal(size=100) 
y = y0 + noise

# reduce the vector of residuals to a single scalar cost
# (here: the sum of absolute errors)
@reduced(lambda x,y: abs(x)+abs(y))
def objective(coeffs, x, y):
    return polyeval(coeffs, x) - y

bounds = [(None, None), (None, None), (None, None)]
args = (x, y)

# reference 'solution' from numpy's least-squares polyfit:
xs = polyfit(x, y, 2)
ys = objective(xs, x, y)


if __name__ == '__main__':

  from mystic.solvers import diffev2, fmin_powell
  from mystic.math import almostEqual

  result = diffev2(objective, args=args, x0=bounds, bounds=bounds, npop=40, ftol=1e-8, gtol=100, disp=False, full_output=True)
  assert almostEqual(result[0], xs, tol=1e-1)
  assert almostEqual(result[1], ys, rel=1e-1)

  result = fmin_powell(objective, args=args, x0=[0.0,0.0,0.0], bounds=bounds, disp=False, full_output=True)
  assert almostEqual(result[0], xs, tol=1e-1)
  assert almostEqual(result[1], ys, rel=1e-1)

mystic also integrates with pathos and pyina (see https://github.com/uqfoundation), which provide parallel map functions for multiprocessing and for MPI clusters, respectively. So when a single function evaluation takes 30 seconds, the solver's evaluations can be spread across cores (or across a cluster) to cut the wall-clock time dramatically.
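For a flavor of what that buys you (a sketch using pathos directly; the function and candidate points below are made up), independent evaluations of an expensive function can be farmed out to worker processes:

from pathos.pools import ProcessPool

def expensive(x):
    # stand-in for a calculation that takes ~30 seconds
    return sum(v**2 for v in x)

# hypothetical candidate parameter vectors to score
candidates = [[0.0, 1.0], [0.5, 0.5], [1.0, 0.0], [1.0, 1.0]]

pool = ProcessPool(nodes=4)               # 4 worker processes
scores = pool.map(expensive, candidates)  # evaluations run in parallel
print(max(zip(scores, candidates)))       # best score and its arguments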

Since your function is so expensive, you want a method that needs as few evaluations as possible. L-BFGS-B from scipy.optimize is a good candidate: it supports bound constraints and, if you don't supply a gradient, approximates one by finite differences. Keep in mind, though, that L-BFGS-B in scipy.optimize is a local optimizer, so on a non-convex function it will only climb to the local maximum nearest your starting point.
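A minimal sketch of that approach, reusing the neg_myFun wrapper, bounds and x0 from the sketch in the question (those names are placeholders, not the answerer's actual code):

from scipy.optimize import minimize

# L-BFGS-B respects the (min, max) bounds and, with no gradient supplied,
# estimates one by finite differences -- each estimate costs an extra
# function call per dimension, so six extra calls here
res = minimize(neg_myFun, x0, method='L-BFGS-B', bounds=bounds)

print(res.x)     # argument vector at the (local) maximum
print(-res.fun)  # the corresponding maximum of myFun, un-negated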
