Question
I wanted to use scalers, so I tested your Paraboloid example from the OpenMDAO 0.x docs with OpenMDAO 1.x, but I get weird results with or without scalers. Here is the code:
from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer


class Paraboloid(Component):

    def __init__(self):
        super(Paraboloid, self).__init__()

        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        x = params['x']
        y = params['y']

        #unknowns['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
        unknowns['f_xy'] = (1000.*x-3.)**2 + (1000.*x)*(0.01*y) + (0.01*y+4.)**2 - 3.

    def linearize(self, params, unknowns, resids):
        """ Jacobian for our paraboloid."""
        x = params['x']
        y = params['y']

        J = {}
        #J['f_xy', 'x'] = 2.0*x - 6.0 + y
        #J['f_xy', 'y'] = 2.0*y + 8.0 + x
        J['f_xy', 'x'] = 2000000.0*x - 6000.0 + 10.0*y
        J['f_xy', 'y'] = 0.0002*y + 0.08 + 10.0*x
        return J


if __name__ == "__main__":
    top = Problem()
    root = top.root = Group()

    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=0.001)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))
When I run it on my system, it gives:
Python 2.7.11 |Anaconda 2.5.0 (64-bit)| (default, Jan 29 2016, 14:26:21) [MSC v.1500 64 bit (AMD64)]
Type "help", "copyright", "credits" or "license" for more information.
[evaluate paraboloid_optimize_scaled.py]
##############################################
Setup: Checking for potential issues...

No recorders have been specified, so no data will be saved.

Setup: Check complete.
##############################################

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [ 8981902.27846645]
            Iterations: 1
            Function evaluations: 12
            Gradient evaluations: 1
Optimization Complete
-----------------------------------

Minimum of 8981902.278466 found at (3.000000, -4.000000)
Did I miss something?
Answer 1:
Scalers are defined in the opposite sense in OpenMDAO 1.x compared to 0.x. In 1.x the following scaling relationship is used:
driver_value = (model_value + adder)*scaler
So you need to flip the scaler values relative to the old tutorial. There was a secondary problem as well: a mistake in your analytic derivatives, which is also corrected below.
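For concreteness, here is the arithmetic of that relationship in plain Python (an illustrative sketch only, not an OpenMDAO API; the numbers come from the optimization result further down):

# Plain-Python illustration of the OpenMDAO 1.x convention
# driver_value = (model_value + adder) * scaler
def driver_value(model_value, scaler=1.0, adder=0.0):
    return (model_value + adder) * scaler

# With scaler=1000. on x and scaler=.001 on y (as in the corrected script
# below), the model-space optimum (0.006666, -733.3) is O(1) from the
# optimizer's point of view:
print(driver_value(0.006666, scaler=1000.))  # ~6.67
print(driver_value(-733.3, scaler=.001))     # ~-0.733

With that convention in mind, here is the corrected script: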
from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer


class Paraboloid(Component):

    def __init__(self):
        super(Paraboloid, self).__init__()

        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        x = params['x']
        y = params['y']

        #unknowns['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
        unknowns['f_xy'] = (1000.*x-3.)**2 + (1000.*x)*(0.01*y) + (0.01*y+4.)**2 - 3.

    def linearize(self, params, unknowns, resids):
        """ Jacobian for our paraboloid."""
        x = params['x']
        y = params['y']

        J = {}
        #J['f_xy', 'x'] = 2.0*x - 6.0 + y
        #J['f_xy', 'y'] = 2.0*y + 8.0 + x
        J['f_xy', 'x'] = 2000000.0*x - 6000.0 + 10.0*y
        J['f_xy', 'y'] = 0.0002*y + 0.08 + 10.0*x
        return J


if __name__ == "__main__":
    top = Problem()
    root = top.root = Group()
    # approximate all derivatives with finite difference,
    # bypassing the analytic linearize() above
    root.fd_options['force_fd'] = True

    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=.001)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))
Gives:
Optimization terminated successfully. (Exit mode 0)
Current function value: [-27.333333]
Iterations: 3
Function evaluations: 6
Gradient evaluations: 3
Optimization Complete
-----------------------------------
Minimum of -27.333333 found at (0.006666, -733.299996)

Because this version of the model multiplies x by 1000 and y by 0.01 internally, this point corresponds to the unscaled paraboloid's minimum at roughly (6.667, -7.333).
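As a side note, a quick way to catch the kind of Jacobian slip mentioned above is OpenMDAO's partial-derivative check, which compares analytic derivatives against finite differences. A minimal sketch, assuming the 1.x method name Problem.check_partial_derivatives:

# Hedged sketch: compare the analytic linearize() against finite differences.
# Assumes the OpenMDAO 1.x API Problem.check_partial_derivatives.
top = Problem()
root = top.root = Group()
root.add('p1', IndepVarComp('x', 3.0))
root.add('p2', IndepVarComp('y', -4.0))
root.add('p', Paraboloid())
root.connect('p1.x', 'p.x')
root.connect('p2.y', 'p.y')
top.setup()
top.run()
top.check_partial_derivatives()  # prints analytic vs. FD entries per component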
Answer 2:
I can't replicate your issue directly. Compared to the OpenMDAO 0.x tutorial you're referencing, you have some odd scalings baked into your solve_nonlinear and linearize methods. But when I clean those up I get the correct answer for reasonable scaler values, and even for some unreasonable ones (the ones you picked are a bit extreme). When you use scaler/adder from add_desvar you should not need to modify your model at all: these values only modify what the optimizer sees, to help with scaling, and are transformed back to unscaled values before being passed to your model, as the round-trip sketch below shows.
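Here is that round trip in plain Python, using the relationship quoted in the first answer (the inverse mapping is just algebra, not a specific OpenMDAO call):

# driver_value = (model_value + adder) * scaler, and its algebraic inverse
def to_driver(model_value, scaler=1.0, adder=0.0):
    return (model_value + adder) * scaler

def to_model(driver_value, scaler=1.0, adder=0.0):
    return driver_value / scaler - adder

x_model = 6.6667                            # what solve_nonlinear receives
x_driver = to_driver(x_model, scaler=.001)  # what SLSQP actually iterates on
assert abs(to_model(x_driver, scaler=.001) - x_model) < 1e-12

And here is the cleaned-up script: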
from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer


class Paraboloid(Component):

    def __init__(self):
        super(Paraboloid, self).__init__()

        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        x = params['x']
        y = params['y']

        unknowns['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0

    def linearize(self, params, unknowns, resids):
        """ Jacobian for our paraboloid."""
        x = params['x']
        y = params['y']

        J = {}
        J['f_xy', 'x'] = 2.0*x - 6.0 + y
        J['f_xy', 'y'] = 2.0*y + 8.0 + x
        return J


if __name__ == "__main__":
    top = Problem()
    root = top.root = Group()

    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    # top.driver.add_desvar('p1.x', lower=-1000, upper=1000)
    # top.driver.add_desvar('p2.y', lower=-1000, upper=1000)
    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=.001)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))
This gives:
##############################################
Setup: Checking for potential issues...
No recorders have been specified, so no data will be saved.
Setup: Check complete.
##############################################
Optimization terminated successfully. (Exit mode 0)
Current function value: [-27.33333333]
Iterations: 12
Function evaluations: 15
Gradient evaluations: 12
Optimization Complete
-----------------------------------
Source: https://stackoverflow.com/questions/35542165/paraboloid-optimization-requiring-scaling