From 3dce89108c30818a22fd9e49eb01abd603b38b58 Mon Sep 17 00:00:00 2001 From: Stefano Date: Mon, 13 Mar 2023 15:00:43 +0100 Subject: [PATCH 01/26] Added method to convert input variable structure to Opti variables. --- src/hippopt/__init__.py | 1 + src/hippopt/base/__init__.py | 2 +- src/hippopt/base/opti_solver.py | 95 +++++++++++++++++++++++++ src/hippopt/base/optimization_object.py | 4 +- src/hippopt/base/solver.py | 0 test/test_base.py | 35 ++++++++- 6 files changed, 131 insertions(+), 6 deletions(-) create mode 100644 src/hippopt/base/opti_solver.py delete mode 100644 src/hippopt/base/solver.py diff --git a/src/hippopt/__init__.py b/src/hippopt/__init__.py index 927b4b4f..871deb1f 100644 --- a/src/hippopt/__init__.py +++ b/src/hippopt/__init__.py @@ -1,4 +1,5 @@ from . import base +from .base.opti_solver import OptiSolver from .base.optimization_object import ( OptimizationObject, StorageType, diff --git a/src/hippopt/base/__init__.py b/src/hippopt/base/__init__.py index f2a23186..1fe04632 100644 --- a/src/hippopt/base/__init__.py +++ b/src/hippopt/base/__init__.py @@ -1 +1 @@ -from . import optimization_object, parameter, variable +from . import opti_solver, optimization_object, parameter, variable diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py new file mode 100644 index 00000000..3c6d20ea --- /dev/null +++ b/src/hippopt/base/opti_solver.py @@ -0,0 +1,95 @@ +import copy +import dataclasses +from collections.abc import Iterable +from functools import singledispatchmethod +from typing import List, Type + +import casadi as cs +import numpy as np + +from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject +from hippopt.base.parameter import Parameter +from hippopt.base.variable import Variable + + +@dataclasses.dataclass +class OptiSolver: + _solver: cs.Opti + + def __init__(self): + self._solver = cs.Opti() + + @singledispatchmethod + def generate_optimization_objects( + self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] + ): + pass + + @generate_optimization_objects.register + def _(self, input_structure: OptimizationObject) -> TOptimizationObject: + output = copy.deepcopy(input_structure) + + for field in dataclasses.fields(output): + has_storage_field = OptimizationObject.StorageTypeField in field.metadata + + if ( + has_storage_field + and field.metadata[OptimizationObject.StorageTypeField] + == Variable.StorageType + ): + value = output.__dict__[field.name] + value = ( + value + if not isinstance(value, np.ndarray) + else np.expand_dims(value, axis=1) + ) + output.__setattr__(field.name, self._solver.variable(*value.shape)) + continue + + if ( + has_storage_field + and field.metadata[OptimizationObject.StorageTypeField] + == Parameter.StorageType + ): + value = output.__dict__[field.name] + value = ( + value + if not isinstance(value, np.ndarray) + else np.expand_dims(value, axis=1) + ) + output.__setattr__(field.name, self._solver.parameter(*value.shape)) + continue + + composite_value = output.__getattribute__(field.name) + + is_iterable = isinstance(composite_value, Iterable) + list_of_optimization_objects = is_iterable and all( + isinstance(elem, OptimizationObject) for elem in composite_value + ) + + if ( + isinstance(composite_value, OptimizationObject) + or list_of_optimization_objects + ): + output.__setattr__( + field.name, self.generate_optimization_objects(composite_value) + ) + + return output + + @generate_optimization_objects.register + def _(self, input_structure: list) -> 
List[TOptimizationObject]: + list_of_optimization_objects = isinstance(input_structure, Iterable) and all( + isinstance(elem, OptimizationObject) for elem in input_structure + ) + + assert ( + isinstance(input_structure, OptimizationObject) + or list_of_optimization_objects + ) + + output = copy.deepcopy(input_structure) + for i in range(len(output)): + output[i] = self.generate_optimization_objects(output[i]) + + return output diff --git a/src/hippopt/base/optimization_object.py b/src/hippopt/base/optimization_object.py index 4d75da95..91d7b4f9 100644 --- a/src/hippopt/base/optimization_object.py +++ b/src/hippopt/base/optimization_object.py @@ -13,6 +13,7 @@ @dataclasses.dataclass class OptimizationObject(abc.ABC): StorageType: ClassVar[str] = "generic" + StorageTypeField: ClassVar[str] = "StorageType" StorageTypeMetadata: ClassVar[dict[str, Any]] = dict(StorageType=StorageType) def get_default_initialization( @@ -32,10 +33,9 @@ def get_default_initialized_object( """ output = copy.deepcopy(self) - output_dict = dataclasses.asdict(output) for field in dataclasses.fields(output): - if "StorageType" in field.metadata: + if self.StorageTypeField in field.metadata: output.__setattr__( field.name, output.get_default_initialization(field.name) ) diff --git a/src/hippopt/base/solver.py b/src/hippopt/base/solver.py deleted file mode 100644 index e69de29b..00000000 diff --git a/test/test_base.py b/test/test_base.py index 8c7dc394..549f9ddd 100644 --- a/test/test_base.py +++ b/test/test_base.py @@ -1,9 +1,11 @@ import dataclasses +import casadi as cs import numpy as np from hippopt import ( OptimizationObject, + OptiSolver, Parameter, StorageType, TOptimizationObject, @@ -71,18 +73,17 @@ def test_custom_initialization(): @dataclasses.dataclass class AggregateClass(OptimizationObject): - aggregated: CustomInitializationVariable + aggregated: CustomInitializationVariable = CustomInitializationVariable() other_parameter: StorageType = default_storage_field(cls=Parameter) other: str = "" def __post_init__(self): - self.aggregated = CustomInitializationVariable() self.other_parameter = np.ones(3) self.other = "untouched" def test_aggregated(): - test_var = AggregateClass(aggregated=CustomInitializationVariable()) + test_var = AggregateClass() test_var_init = test_var.get_default_initialized_object() assert test_var_init.aggregated.parameter.shape == (3,) assert np.all(test_var_init.aggregated.parameter == 0) @@ -91,3 +92,31 @@ def test_aggregated(): assert test_var_init.other_parameter.shape == (3,) assert np.all(test_var_init.other_parameter == 0) assert test_var_init.other == "untouched" + + +def test_generate_objects(): + test_var = AggregateClass() + solver = OptiSolver() + opti_var = solver.generate_optimization_objects(test_var) + assert isinstance(opti_var.aggregated.parameter, cs.MX) + assert opti_var.aggregated.parameter.shape == (3, 1) + assert isinstance(opti_var.aggregated.variable, cs.MX) + assert opti_var.aggregated.variable.shape == (3, 1) + assert isinstance(opti_var.other_parameter, cs.MX) + assert opti_var.other_parameter.shape == (3, 1) + assert opti_var.other == "untouched" + + +def test_generate_objects_list(): + test_var_list = [AggregateClass()] * 2 + solver = OptiSolver() + opti_var_list = solver.generate_optimization_objects(test_var_list) + assert len(opti_var_list) == 2 + for opti_var in opti_var_list: + assert isinstance(opti_var.aggregated.parameter, cs.MX) + assert opti_var.aggregated.parameter.shape == (3, 1) + assert isinstance(opti_var.aggregated.variable, cs.MX) + 
assert opti_var.aggregated.variable.shape == (3, 1) + assert isinstance(opti_var.other_parameter, cs.MX) + assert opti_var.other_parameter.shape == (3, 1) + assert opti_var.other == "untouched" From 5235823d78e9bee8b44d8014e406288b9290cdd1 Mon Sep 17 00:00:00 2001 From: Stefano Date: Mon, 13 Mar 2023 15:22:14 +0100 Subject: [PATCH 02/26] Using default_factory in test for CustomInitializationVariable --- test/test_base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/test_base.py b/test/test_base.py index 549f9ddd..b019f702 100644 --- a/test/test_base.py +++ b/test/test_base.py @@ -73,7 +73,9 @@ def test_custom_initialization(): @dataclasses.dataclass class AggregateClass(OptimizationObject): - aggregated: CustomInitializationVariable = CustomInitializationVariable() + aggregated: CustomInitializationVariable = dataclasses.field( + default_factory=CustomInitializationVariable + ) other_parameter: StorageType = default_storage_field(cls=Parameter) other: str = "" From f00f2d72b128a5545e4eb4578232973c124b5fb2 Mon Sep 17 00:00:00 2001 From: Stefano Date: Mon, 13 Mar 2023 15:41:41 +0100 Subject: [PATCH 03/26] Added possibility to retrieve the list of variables after they have been set. --- src/hippopt/base/opti_solver.py | 15 +++++++++++---- test/test_base.py | 2 ++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 3c6d20ea..32376ffc 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -14,10 +14,10 @@ @dataclasses.dataclass class OptiSolver: - _solver: cs.Opti - - def __init__(self): - self._solver = cs.Opti() + _solver: cs.Opti = dataclasses.field(default_factory=cs.Opti) + _variables: TOptimizationObject | List[TOptimizationObject] = dataclasses.field( + default=None + ) @singledispatchmethod def generate_optimization_objects( @@ -75,6 +75,7 @@ def _(self, input_structure: OptimizationObject) -> TOptimizationObject: field.name, self.generate_optimization_objects(composite_value) ) + self._variables = output return output @generate_optimization_objects.register @@ -92,4 +93,10 @@ def _(self, input_structure: list) -> List[TOptimizationObject]: for i in range(len(output)): output[i] = self.generate_optimization_objects(output[i]) + self._variables = output return output + + def get_optimization_objects( + self, + ) -> TOptimizationObject | List[TOptimizationObject]: + return self._variables diff --git a/test/test_base.py b/test/test_base.py index b019f702..1846b7ac 100644 --- a/test/test_base.py +++ b/test/test_base.py @@ -107,6 +107,7 @@ def test_generate_objects(): assert isinstance(opti_var.other_parameter, cs.MX) assert opti_var.other_parameter.shape == (3, 1) assert opti_var.other == "untouched" + assert solver.get_optimization_objects() is opti_var def test_generate_objects_list(): @@ -122,3 +123,4 @@ def test_generate_objects_list(): assert isinstance(opti_var.other_parameter, cs.MX) assert opti_var.other_parameter.shape == (3, 1) assert opti_var.other == "untouched" + assert solver.get_optimization_objects() is opti_var_list From 8c462f4f4f164e56b2fb8f32f4757eeec302a0b6 Mon Sep 17 00:00:00 2001 From: Stefano Date: Wed, 15 Mar 2023 17:13:52 +0100 Subject: [PATCH 04/26] Started implementing a basic optimization problem. 
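
This commit wires a cs.Opti instance into the solver class. For context, a minimal
stand-alone sketch of the cs.Opti workflow that OptiSolver wraps below (solver name,
option dicts, and dimensions are illustrative):

    import casadi as cs

    opti = cs.Opti("nlp")
    # Plugin options and solver options are passed as two separate dicts.
    opti.solver("ipopt", {}, {})

    x = opti.variable(3, 1)   # what self._solver.variable(*value.shape) returns
    p = opti.parameter(3, 1)  # what self._solver.parameter(*value.shape) returns

    opti.set_value(p, [1.0, 2.0, 3.0])
    opti.minimize(cs.sumsqr(x - p))  # cost accumulated via add_cost
    opti.subject_to(x >= 0)          # constraint added via add_constraint

    solution = opti.solve()
    print(solution.value(x))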
--- src/hippopt/base/opti_solver.py | 55 ++++++++++++++++++++++-- src/hippopt/base/optimization_problem.py | 48 +++++++++++++++++++++ src/hippopt/base/problem.py | 0 src/hippopt/base/solver.py | 30 +++++++++++++ 4 files changed, 130 insertions(+), 3 deletions(-) create mode 100644 src/hippopt/base/optimization_problem.py delete mode 100644 src/hippopt/base/problem.py create mode 100644 src/hippopt/base/solver.py diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 32376ffc..21dc0b18 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -2,23 +2,39 @@ import dataclasses from collections.abc import Iterable from functools import singledispatchmethod -from typing import List, Type +from typing import Any, ClassVar, List, Type import casadi as cs import numpy as np from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject from hippopt.base.parameter import Parameter +from hippopt.base.solver import Solver from hippopt.base.variable import Variable @dataclasses.dataclass -class OptiSolver: - _solver: cs.Opti = dataclasses.field(default_factory=cs.Opti) +class OptiSolver(Solver): + DefaultSolverType: ClassVar[str] = "ipopt" + _inner_solver: str = dataclasses.field(default=DefaultSolverType) + _problem_type: dataclasses.InitVar[str] = dataclasses.field(default="nlp") + + _options_plugin: dict[str, Any] = dataclasses.field(default_factory=dict) + _options_solver: dict[str, Any] = dataclasses.field(default_factory=dict) + + _cost: cs.MX = dataclasses.field(default=None) + _solver: cs.Opti = dataclasses.field(default=None) + _solution: cs.OptiSol = dataclasses.field(default=None) _variables: TOptimizationObject | List[TOptimizationObject] = dataclasses.field( default=None ) + def __post_init__(self, _problem_type: str) -> None: + self._solver = cs.Opti(_problem_type) + self._solver.solver( + self._inner_solver, self._options_plugin, self._options_solver + ) + @singledispatchmethod def generate_optimization_objects( self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] @@ -100,3 +116,36 @@ def get_optimization_objects( self, ) -> TOptimizationObject | List[TOptimizationObject]: return self._variables + + def set_opti_options( + self, + inner_solver: str = None, + options_plugin: dict[str, Any] = None, + options_solver: dict[str, Any] = None, + ): + if inner_solver is not None: + self._inner_solver = inner_solver + if options_plugin is not None: + self._options_plugin = options_plugin + if options_solver is not None: + self._options_solver = options_solver + + self._solver.solver( + self._inner_solver, self._options_plugin, self._options_solver + ) + + def solve(self): + self._solver.minimize(self._cost) + self._solution = self._solver.solve() + + def add_cost(self, input_cost: cs.Function): + # TODO Stefano: Check if it is a constraint. If is an equality, add the 2-norm. If it is an inequality? + if self._cost is None: + _cost = input_cost + return + + self._cost += input_cost + + def add_constraint(self, input_constraint: cs.Function): + # TODO Stefano: Check if it is a cost. 
If so, set it equal to zero + self._solver.subject_to(input_constraint) diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py new file mode 100644 index 00000000..b9d285b6 --- /dev/null +++ b/src/hippopt/base/optimization_problem.py @@ -0,0 +1,48 @@ +import abc +import dataclasses +from enum import Enum +from functools import singledispatchmethod +from typing import Generator, List, Type + +import casadi as cs + +from hippopt.base.opti_solver import OptiSolver +from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject +from hippopt.base.solver import TSolver + + +class ExpressionType(Enum): + skip = 0 + subject_to = 1 + minimize = 2 + + +@dataclasses.dataclass +class OptimizationProblem(abc.ABC): + _solver: TSolver = dataclasses.field(default=OptiSolver) + + def generate_optimization_objects( + self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] + ) -> TOptimizationObject | List[TOptimizationObject]: + return self._solver.generate_optimization_objects(input_structure) + + @singledispatchmethod + def add_expression( + self, mode: ExpressionType, expression: cs.MX | Generator[cs.MX] + ): + pass + + @add_expression.register + def add_expression(self, mode: ExpressionType, expression: cs.MX): + match mode: + case ExpressionType.subject_to: + self._solver.add_cost(expression) + case ExpressionType.minimize: + self._solver.add_constraint(expression) + case _: + pass + + @add_expression.register + def add_expression(self, mode: ExpressionType, expressions: Generator[cs.MX]): + for expr in expressions: + self.add_expression(mode, expr) diff --git a/src/hippopt/base/problem.py b/src/hippopt/base/problem.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/hippopt/base/solver.py b/src/hippopt/base/solver.py new file mode 100644 index 00000000..d80902dd --- /dev/null +++ b/src/hippopt/base/solver.py @@ -0,0 +1,30 @@ +import abc +import dataclasses +from typing import List, Type, TypeVar + +import casadi as cs + +from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject + +TSolver = TypeVar("TSolver", bound="Solver") + + +@dataclasses.dataclass +class Solver(abc.ABC): + @abc.abstractmethod + def generate_optimization_objects( + self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] + ): + pass + + @abc.abstractmethod + def solve(self): + pass + + @abc.abstractmethod + def add_cost(self, input_cost: cs.Function): + pass + + @abc.abstractmethod + def add_constraint(self, input_constraint: cs.Function): + pass From 9fde714d492ce3200f68f2f2f41bfcdac8b9fd19 Mon Sep 17 00:00:00 2001 From: Stefano Date: Wed, 15 Mar 2023 17:20:40 +0100 Subject: [PATCH 05/26] Renamed solver to a more specific OptimizationSolver. 
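
The interface itself is unchanged by the rename. As a sketch, a hypothetical
alternative backend would now derive from OptimizationSolver and implement the same
four abstract methods (class name and bodies are placeholders):

    import dataclasses

    from hippopt.base.optimization_solver import OptimizationSolver


    @dataclasses.dataclass
    class MyCustomSolver(OptimizationSolver):
        # Hypothetical backend: any class implementing this interface
        # can take the place of OptiSolver.
        def generate_optimization_objects(self, input_structure):
            ...

        def solve(self):
            ...

        def add_cost(self, input_cost):
            ...

        def add_constraint(self, input_constraint):
            ...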
--- src/hippopt/base/opti_solver.py | 4 ++-- src/hippopt/base/optimization_problem.py | 4 ++-- src/hippopt/base/{solver.py => optimization_solver.py} | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) rename src/hippopt/base/{solver.py => optimization_solver.py} (74%) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 21dc0b18..7013fba5 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -8,13 +8,13 @@ import numpy as np from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject +from hippopt.base.optimization_solver import OptimizationSolver from hippopt.base.parameter import Parameter -from hippopt.base.solver import Solver from hippopt.base.variable import Variable @dataclasses.dataclass -class OptiSolver(Solver): +class OptiSolver(OptimizationSolver): DefaultSolverType: ClassVar[str] = "ipopt" _inner_solver: str = dataclasses.field(default=DefaultSolverType) _problem_type: dataclasses.InitVar[str] = dataclasses.field(default="nlp") diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py index b9d285b6..3e217e89 100644 --- a/src/hippopt/base/optimization_problem.py +++ b/src/hippopt/base/optimization_problem.py @@ -8,7 +8,7 @@ from hippopt.base.opti_solver import OptiSolver from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject -from hippopt.base.solver import TSolver +from hippopt.base.optimization_solver import TOptimizationSolver class ExpressionType(Enum): @@ -19,7 +19,7 @@ class ExpressionType(Enum): @dataclasses.dataclass class OptimizationProblem(abc.ABC): - _solver: TSolver = dataclasses.field(default=OptiSolver) + _solver: TOptimizationSolver = dataclasses.field(default=OptiSolver) def generate_optimization_objects( self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] diff --git a/src/hippopt/base/solver.py b/src/hippopt/base/optimization_solver.py similarity index 74% rename from src/hippopt/base/solver.py rename to src/hippopt/base/optimization_solver.py index d80902dd..d5113da5 100644 --- a/src/hippopt/base/solver.py +++ b/src/hippopt/base/optimization_solver.py @@ -4,13 +4,13 @@ import casadi as cs -from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject +from hippopt.base.optimization_object import OptimizationObject -TSolver = TypeVar("TSolver", bound="Solver") +TOptimizationSolver = TypeVar("TOptimizationSolver", bound="OptimizationSolver") @dataclasses.dataclass -class Solver(abc.ABC): +class OptimizationSolver(abc.ABC): @abc.abstractmethod def generate_optimization_objects( self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] From 69316298869956fa7a95b14b8e909c37e599a2e7 Mon Sep 17 00:00:00 2001 From: Stefano Date: Thu, 16 Mar 2023 18:46:51 +0100 Subject: [PATCH 06/26] Added first working test of optimization problem. 
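
The test below exercises the complete user-facing workflow. Condensed to a sketch
(the MyVariables structure is illustrative; note that at this stage solve() still
returns the backend-specific cs.OptiSol):

    import dataclasses

    import casadi as cs
    import numpy as np

    from hippopt import (
        ExpressionType,
        OptimizationObject,
        OptimizationProblem,
        StorageType,
        Variable,
        default_storage_field,
    )


    @dataclasses.dataclass
    class MyVariables(OptimizationObject):
        x: StorageType = default_storage_field(Variable)

        def __post_init__(self):
            self.x = np.zeros(2)  # shape of the generated cs.MX variable


    problem = OptimizationProblem()
    variables = problem.generate_optimization_objects(input_structure=MyVariables())

    # Generator expressions are expanded into one cost/constraint per element.
    problem.add_expression(
        mode=ExpressionType.minimize,
        expression=(cs.power(variables.x[k] - k, 2) for k in range(2)),
    )
    problem.add_expression(
        mode=ExpressionType.subject_to,
        expression=(variables.x[k] >= 0 for k in range(2)),
    )

    solution = problem.solver().solve()
    print(solution.value(variables.x))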
--- src/hippopt/__init__.py | 1 + src/hippopt/base/__init__.py | 8 ++- src/hippopt/base/opti_solver.py | 36 +++++++------ src/hippopt/base/optimization_problem.py | 45 ++++++++--------- src/hippopt/base/optimization_solver.py | 11 ++-- test/test_optimization_problem.py | 64 ++++++++++++++++++++++++ 6 files changed, 122 insertions(+), 43 deletions(-) create mode 100644 test/test_optimization_problem.py diff --git a/src/hippopt/__init__.py b/src/hippopt/__init__.py index 871deb1f..b4c0b8a2 100644 --- a/src/hippopt/__init__.py +++ b/src/hippopt/__init__.py @@ -6,5 +6,6 @@ TOptimizationObject, default_storage_field, ) +from .base.optimization_problem import ExpressionType, OptimizationProblem from .base.parameter import Parameter, TParameter from .base.variable import TVariable, Variable diff --git a/src/hippopt/base/__init__.py b/src/hippopt/base/__init__.py index 1fe04632..5498a41f 100644 --- a/src/hippopt/base/__init__.py +++ b/src/hippopt/base/__init__.py @@ -1 +1,7 @@ -from . import opti_solver, optimization_object, parameter, variable +from . import ( + opti_solver, + optimization_object, + optimization_problem, + parameter, + variable, +) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 7013fba5..d2c3f0c6 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -1,8 +1,7 @@ import copy import dataclasses from collections.abc import Iterable -from functools import singledispatchmethod -from typing import Any, ClassVar, List, Type +from typing import Any, ClassVar, List import casadi as cs import numpy as np @@ -35,14 +34,9 @@ def __post_init__(self, _problem_type: str) -> None: self._inner_solver, self._options_plugin, self._options_solver ) - @singledispatchmethod - def generate_optimization_objects( - self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] - ): - pass - - @generate_optimization_objects.register - def _(self, input_structure: OptimizationObject) -> TOptimizationObject: + def _generate_objects_from_instance( + self, input_structure: OptimizationObject + ) -> TOptimizationObject: output = copy.deepcopy(input_structure) for field in dataclasses.fields(output): @@ -94,8 +88,9 @@ def _(self, input_structure: OptimizationObject) -> TOptimizationObject: self._variables = output return output - @generate_optimization_objects.register - def _(self, input_structure: list) -> List[TOptimizationObject]: + def _generate_objects_from_list( + self, input_structure: list + ) -> List[TOptimizationObject]: list_of_optimization_objects = isinstance(input_structure, Iterable) and all( isinstance(elem, OptimizationObject) for elem in input_structure ) @@ -112,6 +107,13 @@ def _(self, input_structure: list) -> List[TOptimizationObject]: self._variables = output return output + def generate_optimization_objects( + self, input_structure: OptimizationObject | List[OptimizationObject] + ): + if isinstance(input_structure, OptimizationObject): + return self._generate_objects_from_instance(input_structure=input_structure) + return self._generate_objects_from_list(input_structure=input_structure) + def get_optimization_objects( self, ) -> TOptimizationObject | List[TOptimizationObject]: @@ -137,15 +139,19 @@ def set_opti_options( def solve(self): self._solver.minimize(self._cost) self._solution = self._solver.solve() + return self._solution - def add_cost(self, input_cost: cs.Function): + def add_cost(self, input_cost: cs.MX): # TODO Stefano: Check if it is a constraint. If is an equality, add the 2-norm. 
If it is an inequality? if self._cost is None: - _cost = input_cost + self._cost = input_cost return self._cost += input_cost - def add_constraint(self, input_constraint: cs.Function): + def add_constraint(self, input_constraint: cs.MX): # TODO Stefano: Check if it is a cost. If so, set it equal to zero self._solver.subject_to(input_constraint) + + def cost(self) -> cs.MX: + return self._cost diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py index 3e217e89..4934a31b 100644 --- a/src/hippopt/base/optimization_problem.py +++ b/src/hippopt/base/optimization_problem.py @@ -1,14 +1,14 @@ import abc import dataclasses +import types from enum import Enum -from functools import singledispatchmethod from typing import Generator, List, Type import casadi as cs from hippopt.base.opti_solver import OptiSolver from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject -from hippopt.base.optimization_solver import TOptimizationSolver +from hippopt.base.optimization_solver import OptimizationSolver class ExpressionType(Enum): @@ -19,30 +19,29 @@ class ExpressionType(Enum): @dataclasses.dataclass class OptimizationProblem(abc.ABC): - _solver: TOptimizationSolver = dataclasses.field(default=OptiSolver) + _solver: OptimizationSolver = dataclasses.field(default_factory=OptiSolver) def generate_optimization_objects( - self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] + self, input_structure: OptimizationObject | List[Type[OptimizationObject]] ) -> TOptimizationObject | List[TOptimizationObject]: - return self._solver.generate_optimization_objects(input_structure) + return self._solver.generate_optimization_objects( + input_structure=input_structure + ) - @singledispatchmethod def add_expression( - self, mode: ExpressionType, expression: cs.MX | Generator[cs.MX] + self, mode: ExpressionType, expression: cs.MX | Generator[cs.MX, None, None] ): - pass - - @add_expression.register - def add_expression(self, mode: ExpressionType, expression: cs.MX): - match mode: - case ExpressionType.subject_to: - self._solver.add_cost(expression) - case ExpressionType.minimize: - self._solver.add_constraint(expression) - case _: - pass - - @add_expression.register - def add_expression(self, mode: ExpressionType, expressions: Generator[cs.MX]): - for expr in expressions: - self.add_expression(mode, expr) + if isinstance(expression, types.GeneratorType): + for expr in expression: + self.add_expression(mode, expr) + else: + match mode: + case ExpressionType.subject_to: + self._solver.add_constraint(expression) + case ExpressionType.minimize: + self._solver.add_cost(expression) + case _: + pass + + def solver(self) -> OptimizationSolver: + return self._solver diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py index d5113da5..e5542ba6 100644 --- a/src/hippopt/base/optimization_solver.py +++ b/src/hippopt/base/optimization_solver.py @@ -1,6 +1,6 @@ import abc import dataclasses -from typing import List, Type, TypeVar +from typing import List, TypeVar import casadi as cs @@ -13,7 +13,7 @@ class OptimizationSolver(abc.ABC): @abc.abstractmethod def generate_optimization_objects( - self, input_structure: Type[OptimizationObject] | List[Type[OptimizationObject]] + self, input_structure: OptimizationObject | List[OptimizationObject] ): pass @@ -22,9 +22,12 @@ def solve(self): pass @abc.abstractmethod - def add_cost(self, input_cost: cs.Function): + def add_cost(self, input_cost: cs.MX): pass 
@abc.abstractmethod - def add_constraint(self, input_constraint: cs.Function): + def add_constraint(self, input_constraint: cs.MX): + pass + + def cost(self) -> cs.MX: pass diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py new file mode 100644 index 00000000..31d6c4e4 --- /dev/null +++ b/test/test_optimization_problem.py @@ -0,0 +1,64 @@ +import dataclasses + +import casadi as cs +import numpy as np +import pytest + +from hippopt import ( + ExpressionType, + OptimizationObject, + OptimizationProblem, + StorageType, + Variable, + default_storage_field, +) + + +@dataclasses.dataclass +class TestVar(OptimizationObject): + variable: StorageType = default_storage_field(Variable) + + def __post_init__(self): + self.variable = np.zeros(3) + + +def test_opti_solver(): + problem = OptimizationProblem() + var = problem.generate_optimization_objects(input_structure=TestVar()) + np.random.seed(123) + a = 10.0 * np.random.rand(3) + 0.01 + b = 20.0 * np.random.rand(3) - 10.0 + c = 20.0 * np.random.rand(3) - 10.0 + + problem.add_expression( + mode=ExpressionType.minimize, + expression=( + a[k] * cs.power(var.variable[k], 2) + b[k] * var.variable[k] + for k in range(0, 3) + ), + ) + + problem.add_expression( + mode=ExpressionType.subject_to, + expression=(var.variable[k] >= c[k] for k in range(3)), # noqa + ) + + output = problem.solver().solve() + # TODO: Stefano The output should not be opti specific + + expected_x = np.zeros(3) + expected_cost = 0 + for i in range(3): + expected = -b[i] / (2 * a[i]) + expected_x[i] = expected if expected >= c[i] else c[i] + expected_cost += ( + -b[i] ** 2 / (4 * a[i]) + if expected >= c[i] + else a[i] * (c[i] ** 2) + b[i] * c[i] + ) + + assert output.value(var.variable) == pytest.approx(expected_x) + assert output.value(problem.solver().cost()) == pytest.approx(expected_cost) + + +# TODO: Stefano test setting of initial condition and of parameters From 6fb670126e72df0bcc8a798a089b4fb284db103d Mon Sep 17 00:00:00 2001 From: Stefano Date: Thu, 16 Mar 2023 18:59:45 +0100 Subject: [PATCH 07/26] Using action/checkout v3 --- .github/workflows/ci_cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 06b81e92..cfc98c37 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -25,7 +25,7 @@ jobs: steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: conda-incubator/setup-miniconda@v2 with: From 83f64aee4e6e3b04baf1aae0116a57d902de7720 Mon Sep 17 00:00:00 2001 From: Stefano Date: Fri, 17 Mar 2023 10:49:43 +0100 Subject: [PATCH 08/26] Added mechanism to fill back the optimization structure when retrieving the output --- src/hippopt/base/opti_solver.py | 75 ++++++++++++++++++++++--- src/hippopt/base/optimization_solver.py | 14 ++++- test/test_optimization_problem.py | 11 ++-- 3 files changed, 85 insertions(+), 15 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index d2c3f0c6..e487c3c8 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -1,7 +1,7 @@ import copy import dataclasses from collections.abc import Iterable -from typing import Any, ClassVar, List +from typing import Any, ClassVar, List, Tuple import casadi as cs import numpy as np @@ -23,7 +23,11 @@ class OptiSolver(OptimizationSolver): _cost: cs.MX = dataclasses.field(default=None) _solver: cs.Opti = dataclasses.field(default=None) - _solution: cs.OptiSol = dataclasses.field(default=None) + 
_opti_solution: cs.OptiSol = dataclasses.field(default=None) + _output_solution: TOptimizationObject | List[ + TOptimizationObject + ] = dataclasses.field(default=None) + _output_cost: float = dataclasses.field(default=None) _variables: TOptimizationObject | List[TOptimizationObject] = dataclasses.field( default=None ) @@ -36,7 +40,7 @@ def __post_init__(self, _problem_type: str) -> None: def _generate_objects_from_instance( self, input_structure: OptimizationObject - ) -> TOptimizationObject: + ) -> OptimizationObject: output = copy.deepcopy(input_structure) for field in dataclasses.fields(output): @@ -90,7 +94,7 @@ def _generate_objects_from_instance( def _generate_objects_from_list( self, input_structure: list - ) -> List[TOptimizationObject]: + ) -> List[OptimizationObject]: list_of_optimization_objects = isinstance(input_structure, Iterable) and all( isinstance(elem, OptimizationObject) for elem in input_structure ) @@ -107,6 +111,53 @@ def _generate_objects_from_list( self._variables = output return output + def _generate_solution_output( + self, variables: OptimizationObject | List[OptimizationObject] + ) -> OptimizationObject | List[OptimizationObject]: + output = copy.deepcopy(variables) + + if isinstance(variables, Iterable): + i = 0 + for element in variables: + output[i] = self._generate_solution_output(element) + + return output + + for field in dataclasses.fields(variables): + has_storage_field = OptimizationObject.StorageTypeField in field.metadata + + if ( + has_storage_field + and ( + field.metadata[OptimizationObject.StorageTypeField] + == Variable.StorageType + ) + or ( + field.metadata[OptimizationObject.StorageTypeField] + == Parameter.StorageType + ) + ): + var = variables.__dict__[field.name] + output.__setattr__(field.name, self._opti_solution.value(var)) + continue + + composite_variable = variables.__getattribute__(field.name) + + is_iterable = isinstance(composite_variable, Iterable) + list_of_optimization_objects = is_iterable and all( + isinstance(elem, OptimizationObject) for elem in composite_variable + ) + + if ( + isinstance(composite_variable, OptimizationObject) + or list_of_optimization_objects + ): + output.__setattr__( + field.name, self._generate_solution_output(composite_variable) + ) + + return output + def generate_optimization_objects( self, input_structure: OptimizationObject | List[OptimizationObject] ): @@ -136,10 +187,18 @@ def set_opti_options( self._inner_solver, self._options_plugin, self._options_solver ) - def solve(self): + def solve(self) -> Tuple[OptimizationObject, float]: self._solver.minimize(self._cost) - self._solution = self._solver.solve() - return self._solution + self._opti_solution = self._solver.solve() + self._output_cost = self._opti_solution.value(self._cost) + self._output_solution = self._generate_solution_output(self._variables) + return self._output_solution, self._output_cost + + def get_solution(self) -> OptimizationObject | List[OptimizationObject] | None: + return self._output_solution + + def get_cost_value(self) -> float | None: + return self._output_cost def add_cost(self, input_cost: cs.MX): # TODO Stefano: Check if it is a constraint. If is an equality, add the 2-norm. If it is an inequality? @@ -153,5 +212,5 @@ def add_constraint(self, input_constraint: cs.MX): # TODO Stefano: Check if it is a cost. 
If so, set it equal to zero
        self._solver.subject_to(input_constraint)

-    def cost(self) -> cs.MX:
+    def cost_function(self) -> cs.MX:
         return self._cost
diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py
index e5542ba6..2eaf4d92 100644
--- a/src/hippopt/base/optimization_solver.py
+++ b/src/hippopt/base/optimization_solver.py
@@ -1,6 +1,6 @@
 import abc
 import dataclasses
-from typing import List, TypeVar
+from typing import List, Tuple, TypeVar

 import casadi as cs

@@ -18,7 +18,15 @@ def generate_optimization_objects(
         pass

     @abc.abstractmethod
-    def solve(self):
+    def solve(self) -> Tuple[OptimizationObject, float]:
+        pass
+
+    @abc.abstractmethod
+    def get_solution(self) -> OptimizationObject | List[OptimizationObject] | None:
+        pass
+
+    @abc.abstractmethod
+    def get_cost_value(self) -> float | None:
         pass

     @abc.abstractmethod
@@ -29,5 +37,5 @@ def add_constraint(self, input_constraint: cs.MX):
         pass

-    def cost(self) -> cs.MX:
+    def cost_function(self) -> cs.MX:
         pass
diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py
index 31d6c4e4..d1991f4d 100644
--- a/test/test_optimization_problem.py
+++ b/test/test_optimization_problem.py
@@ -43,8 +43,7 @@ def test_opti_solver():
         expression=(var.variable[k] >= c[k] for k in range(3)),  # noqa
     )

-    output = problem.solver().solve()
-    # TODO: Stefano The output should not be opti specific
+    output, cost_value = problem.solver().solve()

     expected_x = np.zeros(3)
     expected_cost = 0
@@ -57,8 +56,12 @@ def test_opti_solver():
             else a[i] * (c[i] ** 2) + b[i] * c[i]
         )

-    assert output.value(var.variable) == pytest.approx(expected_x)
-    assert output.value(problem.solver().cost()) == pytest.approx(expected_cost)
+    assert output.variable == pytest.approx(expected_x)  # noqa
+    assert cost_value == pytest.approx(expected_cost)
+
+    assert problem.solver().get_solution().variable == pytest.approx(expected_x)  # noqa
+    assert problem.solver().get_cost_value() == pytest.approx(expected_cost)


 # TODO: Stefano test setting of initial condition and of parameters
+# TODO: Stefano add test with list of variables
From a12be92704e09f59978e77c2a19786a353eb9409 Mon Sep 17 00:00:00 2001
From: Stefano
Date: Fri, 17 Mar 2023 18:33:19 +0100
Subject: [PATCH 09/26] Added possibility to set an initial guess.
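
A minimal sketch of the new warm-start API (the structure and the values are
illustrative). Guesses must be numpy arrays and fields left to None are skipped;
variables are warm-started through Opti's set_initial, while parameters receive
their value through set_value:

    import dataclasses

    import numpy as np

    from hippopt import (
        OptimizationObject,
        OptimizationProblem,
        Parameter,
        StorageType,
        Variable,
        default_storage_field,
    )


    @dataclasses.dataclass
    class MyStructure(OptimizationObject):
        x: StorageType = default_storage_field(Variable)
        p: StorageType = default_storage_field(Parameter)

        def __post_init__(self):
            self.x = np.zeros(3)
            self.p = np.zeros(3)


    problem = OptimizationProblem()
    variables = problem.generate_optimization_objects(input_structure=MyStructure())

    guess = MyStructure()
    guess.x = np.array([1.0, 2.0, 3.0])  # warm start for the variable
    guess.p = np.array([0.5, 0.5, 0.5])  # value assigned to the parameter
    problem.solver().set_initial_guess(initial_guess=guess)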
--- src/hippopt/base/opti_solver.py | 170 +++++++++++++++++++++-- src/hippopt/base/optimization_problem.py | 6 +- src/hippopt/base/optimization_solver.py | 6 + test/test_base.py | 4 +- test/test_optimization_problem.py | 127 ++++++++++++++++- 5 files changed, 297 insertions(+), 16 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index e487c3c8..fc78506d 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -1,6 +1,5 @@ import copy import dataclasses -from collections.abc import Iterable from typing import Any, ClassVar, List, Tuple import casadi as cs @@ -76,8 +75,8 @@ def _generate_objects_from_instance( composite_value = output.__getattribute__(field.name) - is_iterable = isinstance(composite_value, Iterable) - list_of_optimization_objects = is_iterable and all( + is_list = isinstance(composite_value, list) + list_of_optimization_objects = is_list and all( isinstance(elem, OptimizationObject) for elem in composite_value ) @@ -95,7 +94,7 @@ def _generate_objects_from_instance( def _generate_objects_from_list( self, input_structure: list ) -> List[OptimizationObject]: - list_of_optimization_objects = isinstance(input_structure, Iterable) and all( + list_of_optimization_objects = isinstance(input_structure, list) and all( isinstance(elem, OptimizationObject) for elem in input_structure ) @@ -116,19 +115,19 @@ def _generate_solution_output( ) -> OptimizationObject | List[OptimizationObject]: output = copy.deepcopy(variables) - if isinstance(variables, Iterable): + if isinstance(variables, list): i = 0 for element in variables: output[i] = self._generate_solution_output(element) + i += 1 return output for field in dataclasses.fields(variables): has_storage_field = OptimizationObject.StorageTypeField in field.metadata - if ( - has_storage_field - and ( + if has_storage_field and ( + ( field.metadata[OptimizationObject.StorageTypeField] == Variable.StorageType ) @@ -143,8 +142,8 @@ def _generate_solution_output( composite_variable = variables.__getattribute__(field.name) - is_iterable = isinstance(composite_variable, Iterable) - list_of_optimization_objects = is_iterable and all( + is_list = isinstance(composite_variable, list) + list_of_optimization_objects = is_list and all( isinstance(elem, OptimizationObject) for elem in composite_variable ) @@ -158,6 +157,129 @@ def _generate_solution_output( return output + def _set_initial_guess_internal( + self, + initial_guess: OptimizationObject, + corresponding_variable: OptimizationObject, + ): + for field in dataclasses.fields(initial_guess): + has_storage_field = OptimizationObject.StorageTypeField in field.metadata + + if ( + has_storage_field + and field.metadata[OptimizationObject.StorageTypeField] + == Variable.StorageType + ): + guess = initial_guess.__dict__[field.name] + + if guess is None: + continue + + if not isinstance(guess, np.ndarray): + raise ValueError( + "The guess for the field " + + field.name + + " is not an numpy array." 
+ ) + + if not hasattr(corresponding_variable, field.name): + raise ValueError( + "The guess has the field " + + field.name + + " but it is not present in the optimization variables" + ) + + self._solver.set_initial( + corresponding_variable.__getattribute__(field.name), guess + ) + continue + + if ( + has_storage_field + and field.metadata[OptimizationObject.StorageTypeField] + == Parameter.StorageType + ): + guess = initial_guess.__dict__[field.name] + + if guess is None: + continue + + if not isinstance(guess, np.ndarray): + raise ValueError( + "The guess for the field " + + field.name + + " is not an numpy array." + ) + + if not hasattr(corresponding_variable, field.name): + raise ValueError( + "The guess has the field " + + field.name + + " but it is not present in the optimization parameters" + ) + + self._solver.set_value( + corresponding_variable.__getattribute__(field.name), guess + ) + continue + + composite_variable_guess = initial_guess.__getattribute__(field.name) + + if isinstance(composite_variable_guess, OptimizationObject): + if not hasattr(corresponding_variable, field.name): + raise ValueError( + "The guess has the field " + + field.name + + " but it is not present in the optimization structure" + ) + + self._set_initial_guess_internal( + initial_guess=composite_variable_guess, + corresponding_variable=corresponding_variable.__getattribute__( + field.name + ), + ) + continue + + is_list = isinstance(composite_variable_guess, list) + list_of_optimization_objects = is_list and all( + isinstance(elem, OptimizationObject) + for elem in composite_variable_guess + ) + + if list_of_optimization_objects: + if not hasattr(corresponding_variable, field.name): + raise ValueError( + "The guess has the field " + + field.name + + " but it is not present in the optimization structure" + ) + corresponding_nested_variable = corresponding_variable.__getattribute__( + field.name + ) + + if not isinstance(corresponding_nested_variable, list): + raise ValueError( + "The guess has the field " + + field.name + + " as list, but the corresponding structure is not a list" + ) + + i = 0 + for element in composite_variable_guess: + if i >= len(corresponding_nested_variable): + raise ValueError( + "The input guess is the list " + + field.name + + " but the corresponding variable structure is not a list" + ) + + self._set_initial_guess_internal( + initial_guess=element, + corresponding_variable=corresponding_nested_variable[i], + ) + i += 1 + def generate_optimization_objects( self, input_structure: OptimizationObject | List[OptimizationObject] ): @@ -170,6 +292,32 @@ def get_optimization_objects( ) -> TOptimizationObject | List[TOptimizationObject]: return self._variables + def set_initial_guess( + self, initial_guess: OptimizationObject | List[OptimizationObject] + ): + if isinstance(initial_guess, list): + if not isinstance(self._variables, list): + raise ValueError( + "The input guess is a list, but the specified variables structure is not" + ) + + i = 0 + for element in initial_guess: + if i >= len(self._variables): + raise ValueError( + "The input guess is a list, but the specified variables structure is not" + ) + + self._set_initial_guess_internal( + initial_guess=element, corresponding_variable=self._variables[i] + ) + i += 1 + return + + self._set_initial_guess_internal( + initial_guess=initial_guess, corresponding_variable=self._variables + ) + def set_opti_options( self, inner_solver: str = None, @@ -201,7 +349,6 @@ def get_cost_value(self) -> float | None: return self._output_cost def 
add_cost(self, input_cost: cs.MX): - # TODO Stefano: Check if it is a constraint. If is an equality, add the 2-norm. If it is an inequality? if self._cost is None: self._cost = input_cost return @@ -209,7 +356,6 @@ def add_cost(self, input_cost: cs.MX): self._cost += input_cost def add_constraint(self, input_constraint: cs.MX): - # TODO Stefano: Check if it is a cost. If so, set it equal to zero self._solver.subject_to(input_constraint) def cost_function(self) -> cs.MX: diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py index 4934a31b..c70777fa 100644 --- a/src/hippopt/base/optimization_problem.py +++ b/src/hippopt/base/optimization_problem.py @@ -22,7 +22,7 @@ class OptimizationProblem(abc.ABC): _solver: OptimizationSolver = dataclasses.field(default_factory=OptiSolver) def generate_optimization_objects( - self, input_structure: OptimizationObject | List[Type[OptimizationObject]] + self, input_structure: OptimizationObject | List[TOptimizationObject] ) -> TOptimizationObject | List[TOptimizationObject]: return self._solver.generate_optimization_objects( input_structure=input_structure @@ -35,10 +35,14 @@ def add_expression( for expr in expression: self.add_expression(mode, expr) else: + assert isinstance(expression, cs.MX) match mode: case ExpressionType.subject_to: + # TODO Stefano: Check if it is a cost. If so, set it equal to zero self._solver.add_constraint(expression) case ExpressionType.minimize: + # TODO Stefano: Check if it is a constraint. If is an equality, add the 2-norm. + # If it is an inequality? self._solver.add_cost(expression) case _: pass diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py index 2eaf4d92..0628fb0c 100644 --- a/src/hippopt/base/optimization_solver.py +++ b/src/hippopt/base/optimization_solver.py @@ -17,6 +17,12 @@ def generate_optimization_objects( ): pass + @abc.abstractmethod + def set_initial_guess( + self, initial_guess: OptimizationObject | List[OptimizationObject] + ): + pass + @abc.abstractmethod def solve(self) -> Tuple[OptimizationObject, float]: pass diff --git a/test/test_base.py b/test/test_base.py index 1846b7ac..5e5438a4 100644 --- a/test/test_base.py +++ b/test/test_base.py @@ -111,7 +111,9 @@ def test_generate_objects(): def test_generate_objects_list(): - test_var_list = [AggregateClass()] * 2 + test_var_list = [] + for _ in range(2): + test_var_list.append(AggregateClass()) solver = OptiSolver() opti_var_list = solver.generate_optimization_objects(test_var_list) assert len(opti_var_list) == 2 diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py index d1991f4d..7254a952 100644 --- a/test/test_optimization_problem.py +++ b/test/test_optimization_problem.py @@ -8,6 +8,7 @@ ExpressionType, OptimizationObject, OptimizationProblem, + Parameter, StorageType, Variable, default_storage_field, @@ -63,5 +64,127 @@ def test_opti_solver(): assert problem.solver().get_cost_value() == pytest.approx(expected_cost) -# TODO: Stefano test setting of initial condition and of parameters -# TODO: Stefano add test with list of variables +@dataclasses.dataclass +class TestVarAndPar(OptimizationObject): + composite: TestVar = dataclasses.field(default_factory=TestVar) + parameter: StorageType = default_storage_field(Parameter) + + def __post_init__(self): + self.parameter = np.zeros(3) + + +def test_opti_solver_with_parameters(): + problem = OptimizationProblem() + initial_guess = TestVarAndPar() + var = 
problem.generate_optimization_objects(input_structure=TestVarAndPar()) + np.random.seed(123) + a = 10.0 * np.random.rand(3) + 0.01 + b = 20.0 * np.random.rand(3) - 10.0 + c = 20.0 * np.random.rand(3) - 10.0 + + initial_guess.parameter = c + + problem.add_expression( + mode=ExpressionType.minimize, + expression=( + a[k] * cs.power(var.composite.variable[k], 2) + + b[k] * var.composite.variable[k] + for k in range(0, 3) + ), + ) + + problem.add_expression( + mode=ExpressionType.subject_to, + expression=( # noqa + var.composite.variable[k] >= var.parameter[k] for k in range(3) + ), + ) + + problem.solver().set_initial_guess(initial_guess=initial_guess) + + output, cost_value = problem.solver().solve() + + expected_x = np.zeros(3) + expected_cost = 0 + for i in range(3): + expected = -b[i] / (2 * a[i]) + expected_x[i] = expected if expected >= c[i] else c[i] + expected_cost += ( + -b[i] ** 2 / (4 * a[i]) + if expected >= c[i] + else a[i] * (c[i] ** 2) + b[i] * c[i] + ) + + assert output.composite.variable == pytest.approx(expected_x) # noqa + assert cost_value == pytest.approx(expected_cost) + assert output.parameter == pytest.approx(c) # noqa + + assert problem.solver().get_solution().composite.variable == pytest.approx( # noqa + expected_x + ) # noqa + assert problem.solver().get_cost_value() == pytest.approx(expected_cost) + + +def test_opti_solver_with_parameters_and_lists(): + problem = OptimizationProblem() + initial_guess = [] + for _ in range(3): + initial_guess.append(TestVarAndPar()) + + var = problem.generate_optimization_objects(input_structure=initial_guess) + np.random.seed(123) + + a = [] + b = [] + c = [] + + for j in range(len(initial_guess)): + a.append(10.0 * np.random.rand(3) + 0.01) + b.append(20.0 * np.random.rand(3) - 10.0) + c.append(20.0 * np.random.rand(3) - 10.0) + initial_guess[j].parameter = c[j] + + problem.add_expression( + mode=ExpressionType.minimize, + expression=( + a[j][k] * cs.power(var[j].composite.variable[k], 2) + + b[j][k] * var[j].composite.variable[k] + for j in range(len(initial_guess)) + for k in range(0, 3) + ), + ) + + problem.add_expression( + mode=ExpressionType.subject_to, + expression=( # noqa + var[j].composite.variable[k] >= c[j][k] + for j in range(len(initial_guess)) + for k in range(3) + ), + ) + + problem.solver().set_initial_guess(initial_guess=initial_guess) + + output, cost_value = problem.solver().solve() + + expected_x = np.zeros(3) + expected_cost = 0 + for i in range(len(initial_guess)): + for j in range(3): + expected = -b[i][j] / (2 * a[i][j]) + expected_x[j] = expected if expected >= c[i][j] else c[i][j] + expected_cost += ( + -b[i][j] ** 2 / (4 * a[i][j]) + if expected >= c[i][j] + else a[i][j] * (c[i][j] ** 2) + b[i][j] * c[i][j] + ) + + assert output[i].composite.variable == pytest.approx(expected_x) # noqa + assert output[i].parameter == pytest.approx(c[i]) # noqa + + assert cost_value == pytest.approx(expected_cost) + assert problem.solver().get_cost_value() == pytest.approx(expected_cost) + + +# TODO: Check better where to use TOptimizationObject. 
+# It should be used to match input and output type, or not to cast it From 3302969680f20f94346ab7f2dea5c9f2b6bd8f2f Mon Sep 17 00:00:00 2001 From: Stefano Date: Fri, 17 Mar 2023 18:37:55 +0100 Subject: [PATCH 10/26] Renamed Variable in ContinuousVariable --- src/hippopt/__init__.py | 2 +- src/hippopt/base/__init__.py | 2 +- src/hippopt/base/{variable.py => continuous_variable.py} | 6 +++--- src/hippopt/base/opti_solver.py | 8 ++++---- test/test_base.py | 6 +++--- test/test_optimization_problem.py | 4 ++-- 6 files changed, 14 insertions(+), 14 deletions(-) rename src/hippopt/base/{variable.py => continuous_variable.py} (57%) diff --git a/src/hippopt/__init__.py b/src/hippopt/__init__.py index b4c0b8a2..e1632b3d 100644 --- a/src/hippopt/__init__.py +++ b/src/hippopt/__init__.py @@ -1,4 +1,5 @@ from . import base +from .base.continuous_variable import ContinuousVariable, TContinuousVariable from .base.opti_solver import OptiSolver from .base.optimization_object import ( OptimizationObject, @@ -8,4 +9,3 @@ ) from .base.optimization_problem import ExpressionType, OptimizationProblem from .base.parameter import Parameter, TParameter -from .base.variable import TVariable, Variable diff --git a/src/hippopt/base/__init__.py b/src/hippopt/base/__init__.py index 5498a41f..89b9c61d 100644 --- a/src/hippopt/base/__init__.py +++ b/src/hippopt/base/__init__.py @@ -1,7 +1,7 @@ from . import ( + continuous_variable, opti_solver, optimization_object, optimization_problem, parameter, - variable, ) diff --git a/src/hippopt/base/variable.py b/src/hippopt/base/continuous_variable.py similarity index 57% rename from src/hippopt/base/variable.py rename to src/hippopt/base/continuous_variable.py index 64912985..c1f85fb6 100644 --- a/src/hippopt/base/variable.py +++ b/src/hippopt/base/continuous_variable.py @@ -3,12 +3,12 @@ from hippopt.base.optimization_object import OptimizationObject -TVariable = TypeVar("TVariable", bound="Variable") +TContinuousVariable = TypeVar("TContinuousVariable", bound="ContinuousVariable") @dataclasses.dataclass -class Variable(OptimizationObject): +class ContinuousVariable(OptimizationObject): """""" - StorageType: ClassVar[str] = "variable" + StorageType: ClassVar[str] = "continuous_variable" StorageTypeMetadata: ClassVar[dict[str, Any]] = dict(StorageType=StorageType) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index fc78506d..6d184425 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -5,10 +5,10 @@ import casadi as cs import numpy as np +from hippopt.base.continuous_variable import ContinuousVariable from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject from hippopt.base.optimization_solver import OptimizationSolver from hippopt.base.parameter import Parameter -from hippopt.base.variable import Variable @dataclasses.dataclass @@ -48,7 +48,7 @@ def _generate_objects_from_instance( if ( has_storage_field and field.metadata[OptimizationObject.StorageTypeField] - == Variable.StorageType + == ContinuousVariable.StorageType ): value = output.__dict__[field.name] value = ( @@ -129,7 +129,7 @@ def _generate_solution_output( if has_storage_field and ( ( field.metadata[OptimizationObject.StorageTypeField] - == Variable.StorageType + == ContinuousVariable.StorageType ) or ( field.metadata[OptimizationObject.StorageTypeField] @@ -168,7 +168,7 @@ def _set_initial_guess_internal( if ( has_storage_field and field.metadata[OptimizationObject.StorageTypeField] - == Variable.StorageType + == 
ContinuousVariable.StorageType ): guess = initial_guess.__dict__[field.name] diff --git a/test/test_base.py b/test/test_base.py index 5e5438a4..af59dd4f 100644 --- a/test/test_base.py +++ b/test/test_base.py @@ -4,19 +4,19 @@ import numpy as np from hippopt import ( + ContinuousVariable, OptimizationObject, OptiSolver, Parameter, StorageType, TOptimizationObject, - Variable, default_storage_field, ) @dataclasses.dataclass class TestVariable(OptimizationObject): - storage: StorageType = default_storage_field(cls=Variable) + storage: StorageType = default_storage_field(cls=ContinuousVariable) def __post_init__(self): self.storage = np.ones(shape=3) @@ -46,7 +46,7 @@ def test_zero_parameter(): @dataclasses.dataclass class CustomInitializationVariable(OptimizationObject): - variable: StorageType = default_storage_field(cls=Variable) + variable: StorageType = default_storage_field(cls=ContinuousVariable) parameter: StorageType = default_storage_field(cls=Parameter) def __post_init__(self): diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py index 7254a952..8ddda80f 100644 --- a/test/test_optimization_problem.py +++ b/test/test_optimization_problem.py @@ -5,19 +5,19 @@ import pytest from hippopt import ( + ContinuousVariable, ExpressionType, OptimizationObject, OptimizationProblem, Parameter, StorageType, - Variable, default_storage_field, ) @dataclasses.dataclass class TestVar(OptimizationObject): - variable: StorageType = default_storage_field(Variable) + variable: StorageType = default_storage_field(ContinuousVariable) def __post_init__(self): self.variable = np.zeros(3) From f888f410385281a3f7e8133c51857cd333f8b5c6 Mon Sep 17 00:00:00 2001 From: Stefano Date: Fri, 17 Mar 2023 18:53:42 +0100 Subject: [PATCH 11/26] Better use of typing objects --- src/hippopt/base/opti_solver.py | 30 ++++++++++++------------ src/hippopt/base/optimization_problem.py | 8 +++---- src/hippopt/base/optimization_solver.py | 10 ++++---- test/test_optimization_problem.py | 20 +++++++--------- 4 files changed, 32 insertions(+), 36 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 6d184425..bc7a3e95 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -15,7 +15,7 @@ class OptiSolver(OptimizationSolver): DefaultSolverType: ClassVar[str] = "ipopt" _inner_solver: str = dataclasses.field(default=DefaultSolverType) - _problem_type: dataclasses.InitVar[str] = dataclasses.field(default="nlp") + problem_type: dataclasses.InitVar[str] = dataclasses.field(default="nlp") _options_plugin: dict[str, Any] = dataclasses.field(default_factory=dict) _options_solver: dict[str, Any] = dataclasses.field(default_factory=dict) @@ -31,15 +31,15 @@ class OptiSolver(OptimizationSolver): default=None ) - def __post_init__(self, _problem_type: str) -> None: - self._solver = cs.Opti(_problem_type) + def __post_init__(self, problem_type: str) -> None: + self._solver = cs.Opti(problem_type) self._solver.solver( self._inner_solver, self._options_plugin, self._options_solver ) def _generate_objects_from_instance( - self, input_structure: OptimizationObject - ) -> OptimizationObject: + self, input_structure: TOptimizationObject + ) -> TOptimizationObject: output = copy.deepcopy(input_structure) for field in dataclasses.fields(output): @@ -92,8 +92,8 @@ def _generate_objects_from_instance( return output def _generate_objects_from_list( - self, input_structure: list - ) -> List[OptimizationObject]: + self, input_structure: 
List[TOptimizationObject] + ) -> List[TOptimizationObject]: list_of_optimization_objects = isinstance(input_structure, list) and all( isinstance(elem, OptimizationObject) for elem in input_structure ) @@ -111,8 +111,8 @@ def _generate_objects_from_list( return output def _generate_solution_output( - self, variables: OptimizationObject | List[OptimizationObject] - ) -> OptimizationObject | List[OptimizationObject]: + self, variables: TOptimizationObject | List[TOptimizationObject] + ) -> TOptimizationObject | List[TOptimizationObject]: output = copy.deepcopy(variables) if isinstance(variables, list): @@ -159,8 +159,8 @@ def _generate_solution_output( def _set_initial_guess_internal( self, - initial_guess: OptimizationObject, - corresponding_variable: OptimizationObject, + initial_guess: TOptimizationObject, + corresponding_variable: TOptimizationObject, ): for field in dataclasses.fields(initial_guess): has_storage_field = OptimizationObject.StorageTypeField in field.metadata @@ -281,7 +281,7 @@ def _set_initial_guess_internal( i += 1 def generate_optimization_objects( - self, input_structure: OptimizationObject | List[OptimizationObject] + self, input_structure: TOptimizationObject | List[TOptimizationObject] ): if isinstance(input_structure, OptimizationObject): return self._generate_objects_from_instance(input_structure=input_structure) @@ -293,7 +293,7 @@ def get_optimization_objects( return self._variables def set_initial_guess( - self, initial_guess: OptimizationObject | List[OptimizationObject] + self, initial_guess: TOptimizationObject | List[TOptimizationObject] ): if isinstance(initial_guess, list): if not isinstance(self._variables, list): @@ -335,14 +335,14 @@ def set_opti_options( self._inner_solver, self._options_plugin, self._options_solver ) - def solve(self) -> Tuple[OptimizationObject, float]: + def solve(self) -> Tuple[TOptimizationObject, float]: self._solver.minimize(self._cost) self._opti_solution = self._solver.solve() self._output_cost = self._opti_solution.value(self._cost) self._output_solution = self._generate_solution_output(self._variables) return self._output_solution, self._output_cost - def get_solution(self) -> OptimizationObject | List[OptimizationObject] | None: + def get_solution(self) -> TOptimizationObject | List[TOptimizationObject] | None: return self._output_solution def get_cost_value(self) -> float | None: diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py index c70777fa..ef89801d 100644 --- a/src/hippopt/base/optimization_problem.py +++ b/src/hippopt/base/optimization_problem.py @@ -2,13 +2,13 @@ import dataclasses import types from enum import Enum -from typing import Generator, List, Type +from typing import Generator, List import casadi as cs from hippopt.base.opti_solver import OptiSolver from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject -from hippopt.base.optimization_solver import OptimizationSolver +from hippopt.base.optimization_solver import TOptimizationSolver class ExpressionType(Enum): @@ -19,7 +19,7 @@ class ExpressionType(Enum): @dataclasses.dataclass class OptimizationProblem(abc.ABC): - _solver: OptimizationSolver = dataclasses.field(default_factory=OptiSolver) + _solver: TOptimizationSolver = dataclasses.field(default_factory=OptiSolver) def generate_optimization_objects( self, input_structure: OptimizationObject | List[TOptimizationObject] @@ -47,5 +47,5 @@ def add_expression( case _: pass - def solver(self) -> OptimizationSolver: + def solver(self) 
-> TOptimizationSolver: return self._solver diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py index 0628fb0c..2ce790dc 100644 --- a/src/hippopt/base/optimization_solver.py +++ b/src/hippopt/base/optimization_solver.py @@ -4,7 +4,7 @@ import casadi as cs -from hippopt.base.optimization_object import OptimizationObject +from hippopt.base.optimization_object import TOptimizationObject TOptimizationSolver = TypeVar("TOptimizationSolver", bound="OptimizationSolver") @@ -13,22 +13,22 @@ class OptimizationSolver(abc.ABC): @abc.abstractmethod def generate_optimization_objects( - self, input_structure: OptimizationObject | List[OptimizationObject] + self, input_structure: TOptimizationObject | List[TOptimizationObject] ): pass @abc.abstractmethod def set_initial_guess( - self, initial_guess: OptimizationObject | List[OptimizationObject] + self, initial_guess: TOptimizationObject | List[TOptimizationObject] ): pass @abc.abstractmethod - def solve(self) -> Tuple[OptimizationObject, float]: + def solve(self) -> Tuple[TOptimizationObject, float]: pass @abc.abstractmethod - def get_solution(self) -> OptimizationObject | List[OptimizationObject] | None: + def get_solution(self) -> TOptimizationObject | List[TOptimizationObject] | None: pass @abc.abstractmethod diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py index 8ddda80f..7683866f 100644 --- a/test/test_optimization_problem.py +++ b/test/test_optimization_problem.py @@ -57,10 +57,10 @@ def test_opti_solver(): else a[i] * (c[i] ** 2) + b[i] * c[i] ) - assert output.variable == pytest.approx(expected_x) # noqa + assert output.variable == pytest.approx(expected_x) assert cost_value == pytest.approx(expected_cost) - assert problem.solver().get_solution().variable == pytest.approx(expected_x) # noqa + assert problem.solver().get_solution().variable == pytest.approx(expected_x) assert problem.solver().get_cost_value() == pytest.approx(expected_cost) @@ -115,13 +115,13 @@ def test_opti_solver_with_parameters(): else a[i] * (c[i] ** 2) + b[i] * c[i] ) - assert output.composite.variable == pytest.approx(expected_x) # noqa + assert output.composite.variable == pytest.approx(expected_x) assert cost_value == pytest.approx(expected_cost) - assert output.parameter == pytest.approx(c) # noqa + assert output.parameter == pytest.approx(c) - assert problem.solver().get_solution().composite.variable == pytest.approx( # noqa + assert problem.solver().get_solution().composite.variable == pytest.approx( expected_x - ) # noqa + ) assert problem.solver().get_cost_value() == pytest.approx(expected_cost) @@ -179,12 +179,8 @@ def test_opti_solver_with_parameters_and_lists(): else a[i][j] * (c[i][j] ** 2) + b[i][j] * c[i][j] ) - assert output[i].composite.variable == pytest.approx(expected_x) # noqa - assert output[i].parameter == pytest.approx(c[i]) # noqa + assert output[i].composite.variable == pytest.approx(expected_x) + assert output[i].parameter == pytest.approx(c[i]) assert cost_value == pytest.approx(expected_cost) assert problem.solver().get_cost_value() == pytest.approx(expected_cost) - - -# TODO: Check better where to use TOptimizationObject. -# It should be used to match input and output type, or not to cast it From 3a1daa02b67ca14e11367ff10a66b1a9db81dcfb Mon Sep 17 00:00:00 2001 From: Stefano Date: Mon, 20 Mar 2023 15:01:10 +0100 Subject: [PATCH 12/26] Added possibility to convert expressions from cost to constraints. 
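
The conversion rules can be sketched as follows (illustrative snippet, not
part of the diff below; `x` and the numeric values are hypothetical):

    import casadi as cs

    opti = cs.Opti()
    x = opti.variable(1)

    # A plain scalar expression passed to subject_to is interpreted as
    # "expression == expected_value" (0.0 by default):
    constraint = (x * x) == 0.0  # what the solver receives for "x * x"

    # An equality passed to minimize becomes a squared residual:
    residual = x - 5            # dep(0) - dep(1) of the expression "x == 5"
    cost = cs.sumsqr(residual)  # what the solver receives as cost

    # An inequality passed to minimize raises a ValueError instead.
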
--- src/hippopt/base/optimization_problem.py | 32 +++++++++-- test/test_optimization_problem.py | 70 ++++++++++++++++++++++++ 2 files changed, 96 insertions(+), 6 deletions(-) diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py index ef89801d..33b96d79 100644 --- a/src/hippopt/base/optimization_problem.py +++ b/src/hippopt/base/optimization_problem.py @@ -29,7 +29,10 @@ def generate_optimization_objects( ) def add_expression( - self, mode: ExpressionType, expression: cs.MX | Generator[cs.MX, None, None] + self, + mode: ExpressionType, + expression: cs.MX | Generator[cs.MX, None, None], + expected_value: float = 0.0, ): if isinstance(expression, types.GeneratorType): for expr in expression: @@ -38,12 +41,29 @@ def add_expression( assert isinstance(expression, cs.MX) match mode: case ExpressionType.subject_to: - # TODO Stefano: Check if it is a cost. If so, set it equal to zero - self._solver.add_constraint(expression) + if ( + expression.is_op(cs.OP_LE) + or expression.is_op(cs.OP_LT) + or expression.is_op(cs.OP_EQ) + ): + self._solver.add_constraint(expression) + else: + if not expression.is_scalar(): + raise ValueError("The input expression is not supported.") + self._solver.add_constraint( + expression == expected_value # noqa + ) + case ExpressionType.minimize: - # TODO Stefano: Check if it is a constraint. If is an equality, add the 2-norm. - # If it is an inequality? - self._solver.add_cost(expression) + if expression.is_op(cs.OP_LE) or expression.is_op(cs.OP_LT): + raise ValueError( + "The conversion from an inequality to a cost is not yet supported" + ) + if expression.is_op(cs.OP_EQ): + error_expr = expression.dep(0) - expression.dep(1) + self._solver.add_cost(cs.sumsqr(error_expr)) + else: + self._solver.add_cost(expression) case _: pass diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py index 7683866f..c203c5f9 100644 --- a/test/test_optimization_problem.py +++ b/test/test_optimization_problem.py @@ -184,3 +184,73 @@ def test_opti_solver_with_parameters_and_lists(): assert cost_value == pytest.approx(expected_cost) assert problem.solver().get_cost_value() == pytest.approx(expected_cost) + + +@dataclasses.dataclass +class SwitchVar(OptimizationObject): + x: StorageType = default_storage_field(ContinuousVariable) + y: StorageType = default_storage_field(ContinuousVariable) + + def __post_init__(self): + self.x = np.zeros(1) + self.y = np.zeros(1) + + +def test_switch_costs(): + initial_problem = OptimizationProblem() + variables = initial_problem.generate_optimization_objects(SwitchVar()) + a = 10 + initial_problem.add_expression(ExpressionType.minimize, variables.x * variables.x) + initial_problem.add_expression( + ExpressionType.minimize, a * variables.y * variables.y + ) + initial_problem.add_expression( + ExpressionType.subject_to, variables.x + variables.y == a - 1 + ) # noqa + output, cost_value = initial_problem.solver().solve() + expected_cost = a + (a - 2) ** 2 + assert cost_value == pytest.approx(expected=expected_cost, rel=0.1) + assert output.x == pytest.approx(a - 2, rel=0.1) + + new_problem = OptimizationProblem() + new_variables = new_problem.generate_optimization_objects(SwitchVar()) + new_problem.add_expression( + ExpressionType.minimize, a * new_variables.y * new_variables.y + ) + new_problem.add_expression( + ExpressionType.subject_to, new_variables.x + new_variables.y == a - 1 + ) # noqa + new_problem.add_expression( + ExpressionType.subject_to, new_variables.x * new_variables.x + ) + 
output, cost_value = new_problem.solver().solve() + expected_cost = a * (a - 1) ** 2 + assert cost_value == pytest.approx(expected=expected_cost, rel=0.1) + assert output.x == pytest.approx(0, abs=1e-4) + + +def test_switch_constraints(): + initial_problem = OptimizationProblem() + variables = initial_problem.generate_optimization_objects(SwitchVar()) + a = 10 + initial_problem.add_expression(ExpressionType.minimize, (variables.x - 5) ** 2) + initial_problem.add_expression( + ExpressionType.minimize, a * variables.y * variables.y + ) + initial_problem.add_expression( + ExpressionType.subject_to, variables.x + variables.y == a - 1 + ) # noqa + initial_output, initial_cost_value = initial_problem.solver().solve() + + new_problem = OptimizationProblem() + new_variables = new_problem.generate_optimization_objects(SwitchVar()) + new_problem.add_expression( + ExpressionType.minimize, a * new_variables.y * new_variables.y + ) + new_problem.add_expression( + ExpressionType.subject_to, new_variables.x + new_variables.y == a - 1 + ) # noqa + new_problem.add_expression(ExpressionType.minimize, new_variables.x == 5) + output, cost_value = new_problem.solver().solve() + assert cost_value == pytest.approx(expected=initial_cost_value, rel=0.1) + assert output.x == pytest.approx(initial_output.x) From 7fb52565ec046b5b39ce3840125f8cfc984f7b49 Mon Sep 17 00:00:00 2001 From: Stefano Date: Fri, 24 Mar 2023 16:01:53 +0100 Subject: [PATCH 13/26] Improved definition of OptimizationSolver interface. It was missing a return type and a decorator --- src/hippopt/base/optimization_solver.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py index 2ce790dc..4dfaa518 100644 --- a/src/hippopt/base/optimization_solver.py +++ b/src/hippopt/base/optimization_solver.py @@ -13,8 +13,8 @@ class OptimizationSolver(abc.ABC): @abc.abstractmethod def generate_optimization_objects( - self, input_structure: TOptimizationObject | List[TOptimizationObject] - ): + self, input_structure: TOptimizationObject | List[TOptimizationObject], **kwargs + ) -> TOptimizationObject | List[TOptimizationObject]: pass @abc.abstractmethod @@ -43,5 +43,6 @@ def add_cost(self, input_cost: cs.MX): def add_constraint(self, input_constraint: cs.MX): pass + @abc.abstractmethod def cost_function(self) -> cs.MX: pass From a6f2b06900f00fbbe3f4c73ee09da8d71bd9ed02 Mon Sep 17 00:00:00 2001 From: Stefano Date: Fri, 24 Mar 2023 16:02:45 +0100 Subject: [PATCH 14/26] Added possibility to change the solver in OptimizationProblem --- src/hippopt/base/optimization_problem.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py index 33b96d79..4333ed5b 100644 --- a/src/hippopt/base/optimization_problem.py +++ b/src/hippopt/base/optimization_problem.py @@ -8,7 +8,7 @@ from hippopt.base.opti_solver import OptiSolver from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject -from hippopt.base.optimization_solver import TOptimizationSolver +from hippopt.base.optimization_solver import OptimizationSolver, TOptimizationSolver class ExpressionType(Enum): @@ -19,7 +19,17 @@ class ExpressionType(Enum): @dataclasses.dataclass class OptimizationProblem(abc.ABC): - _solver: TOptimizationSolver = dataclasses.field(default_factory=OptiSolver) + optimization_solver: dataclasses.InitVar[OptimizationSolver] = dataclasses.field( + default=None + ) + 
_solver: TOptimizationSolver = dataclasses.field(default=None)
+
+    def __post_init__(self, optimization_solver: TOptimizationSolver = None):
+        self._solver = (
+            optimization_solver
+            if isinstance(optimization_solver, OptimizationSolver)
+            else OptiSolver()
+        )
 
     def generate_optimization_objects(
         self, input_structure: OptimizationObject | List[TOptimizationObject]

From c181f0e4dcafc2d7c6e55e91cd753652e6a1997b Mon Sep 17 00:00:00 2001
From: Stefano
Date: Fri, 24 Mar 2023 16:04:10 +0100
Subject: [PATCH 15/26] Checking the size of the guess fields in OptiSolver.

---
 src/hippopt/base/opti_solver.py | 70 +++++++++++++++++++++++--------
 1 file changed, 54 insertions(+), 16 deletions(-)

diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py
index bc7a3e95..ca985c7d 100644
--- a/src/hippopt/base/opti_solver.py
+++ b/src/hippopt/base/opti_solver.py
@@ -51,11 +51,17 @@
                 == ContinuousVariable.StorageType
             ):
                 value = output.__dict__[field.name]
-                value = (
-                    value
-                    if not isinstance(value, np.ndarray)
-                    else np.expand_dims(value, axis=1)
-                )
+
+                if isinstance(value, np.ndarray):
+                    if value.ndim > 2:
+                        raise ValueError(
+                            "Field "
+                            + field.name
+                            + " has more than 2 dimensions."
+                        )
+                    if value.ndim < 2:
+                        value = np.expand_dims(value, axis=1)
+
                 output.__setattr__(field.name, self._solver.variable(*value.shape))
                 continue
 
@@ -65,11 +71,17 @@
                 == Parameter.StorageType
             ):
                 value = output.__dict__[field.name]
-                value = (
-                    value
-                    if not isinstance(value, np.ndarray)
-                    else np.expand_dims(value, axis=1)
-                )
+
+                if isinstance(value, np.ndarray):
+                    if value.ndim > 2:
+                        raise ValueError(
+                            "Field "
+                            + field.name
+                            + " has more than 2 dimensions."
+                        )
+                    if value.ndim < 2:
+                        value = np.expand_dims(value, axis=1)
+
                 output.__setattr__(field.name, self._solver.parameter(*value.shape))
                 continue
 
@@ -189,9 +201,22 @@ def _set_initial_guess_internal(
                         + " but it is not present in the optimization variables"
                     )
 
-                self._solver.set_initial(
-                    corresponding_variable.__getattribute__(field.name), guess
+                corresponding_variable_value = corresponding_variable.__getattribute__(
+                    field.name
+                )
+
+                input_shape = (
+                    guess.shape if len(guess.shape) > 1 else (guess.shape[0], 1)
                 )
+
+                if corresponding_variable_value.shape != input_shape:
+                    raise ValueError(
+                        "The guess has the field "
+                        + field.name
+                        + " but its dimensions do not match those of the corresponding optimization variable"
+                    )
+
+                self._solver.set_initial(corresponding_variable_value, guess)
                 continue
 
             if (
@@ -218,9 +243,22 @@ def _set_initial_guess_internal(
                         + " but it is not present in the optimization parameters"
                     )
 
-                self._solver.set_value(
-                    corresponding_variable.__getattribute__(field.name), guess
+                corresponding_parameter_value = corresponding_variable.__getattribute__(
+                    field.name
+                )
+
+                input_shape = (
+                    guess.shape if len(guess.shape) > 1 else (guess.shape[0], 1)
                 )
+
+                if corresponding_parameter_value.shape != input_shape:
+                    raise ValueError(
+                        "The guess has the field "
+                        + field.name
+                        + " but its dimensions do not match those of the corresponding optimization parameter"
+                    )
+
+                self._solver.set_value(corresponding_parameter_value, guess)
                 continue
 
             composite_variable_guess = initial_guess.__getattribute__(field.name)
@@ -281,8 +319,8 @@ def _set_initial_guess_internal(
                 i += 1
 
     def generate_optimization_objects(
-        self, input_structure: TOptimizationObject | List[TOptimizationObject]
-    ):
+        self, input_structure: TOptimizationObject | List[TOptimizationObject], **kwargs
+    ) -> TOptimizationObject | List[TOptimizationObject]:
         if isinstance(input_structure, OptimizationObject):
             return self._generate_objects_from_instance(input_structure=input_structure)
         return self._generate_objects_from_list(input_structure=input_structure)

From 109859cd371d860eb18993ae3bedd249d5e4ca2c Mon Sep 17 00:00:00 2001
From: Stefano
Date: Fri, 24 Mar 2023 16:25:30 +0100
Subject: [PATCH 16/26] Added methods to directly add costs and constraints.
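
Usage sketch of the two new methods, mirroring the test changes below
(`DemoVar` is a hypothetical structure; the imports follow the ones already
used by the tests):

    import dataclasses

    import casadi as cs
    import numpy as np

    from hippopt import (
        ContinuousVariable,
        OptimizationObject,
        OptimizationProblem,
        StorageType,
        default_storage_field,
    )

    @dataclasses.dataclass
    class DemoVar(OptimizationObject):
        variable: StorageType = default_storage_field(ContinuousVariable)

        def __post_init__(self):
            self.variable = np.zeros(3)

    problem = OptimizationProblem()
    var = problem.generate_optimization_objects(DemoVar())

    # Generators are accepted: each yielded cs.MX is added individually,
    # with the same conversion rules as add_expression.
    problem.add_cost(cs.power(var.variable[k], 2) for k in range(3))
    problem.add_constraint(var.variable[k] >= -1.0 for k in range(3))
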
---
 src/hippopt/base/optimization_problem.py | 66 ++++++++++++++++--------
 test/test_optimization_problem.py        | 32 ++++++------
 2 files changed, 59 insertions(+), 39 deletions(-)

diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py
index 4333ed5b..45203c4f 100644
--- a/src/hippopt/base/optimization_problem.py
+++ b/src/hippopt/base/optimization_problem.py
@@ -38,11 +38,52 @@ def generate_optimization_objects(
             input_structure=input_structure
         )
 
+    def add_cost(
+        self,
+        expression: cs.MX | Generator[cs.MX, None, None],
+        scaling: float | cs.MX = 1.0,
+    ):
+        if isinstance(expression, types.GeneratorType):
+            for expr in expression:
+                self.add_cost(expr, scaling)
+        else:
+            assert isinstance(expression, cs.MX)
+            if expression.is_op(cs.OP_LE) or expression.is_op(cs.OP_LT):
+                raise ValueError(
+                    "The conversion from an inequality to a cost is not yet supported"
+                )
+            if expression.is_op(cs.OP_EQ):
+                error_expr = expression.dep(0) - expression.dep(1)
+                self._solver.add_cost(scaling * cs.sumsqr(error_expr))
+            else:
+                self._solver.add_cost(scaling * expression)  # noqa
+
+    def add_constraint(
+        self,
+        expression: cs.MX | Generator[cs.MX, None, None],
+        expected_value: float | cs.MX = 0.0,
+    ):
+        if isinstance(expression, types.GeneratorType):
+            for expr in expression:
+                self.add_constraint(expr, expected_value)
+        else:
+            assert isinstance(expression, cs.MX)
+            if (
+                expression.is_op(cs.OP_LE)
+                or expression.is_op(cs.OP_LT)
+                or expression.is_op(cs.OP_EQ)
+            ):
+                self._solver.add_constraint(expression)
+            else:
+                if not expression.is_scalar():
+                    raise ValueError("The input expression is not supported.")
+                self._solver.add_constraint(expression == expected_value)  # noqa
+
     def add_expression(
         self,
         mode: ExpressionType,
         expression: cs.MX | Generator[cs.MX, None, None],
-        expected_value: float = 0.0,
+        **kwargs,
     ):
         if isinstance(expression, types.GeneratorType):
             for expr in expression:
-                self.add_expression(mode, expr)
+                self.add_expression(mode, expr, **kwargs)
@@ -51,29 +92,10 @@ def add_expression(
             assert isinstance(expression, cs.MX)
             match mode:
                 case ExpressionType.subject_to:
-                    if (
-                        expression.is_op(cs.OP_LE)
-                        or expression.is_op(cs.OP_LT)
-                        or expression.is_op(cs.OP_EQ)
-                    ):
-                        self._solver.add_constraint(expression)
-                    else:
-                        if not expression.is_scalar():
-                            raise ValueError("The input expression is not supported.")
-                        self._solver.add_constraint(
-                            expression == expected_value  # noqa
-                        )
+                    self.add_constraint(expression, **kwargs)
 
                 case ExpressionType.minimize:
-                    if expression.is_op(cs.OP_LE) or expression.is_op(cs.OP_LT):
-                        raise ValueError(
-                            "The conversion from an inequality to a cost is not yet supported"
-                        )
-                    if expression.is_op(cs.OP_EQ):
-                        error_expr = expression.dep(0) - expression.dep(1)
-                        self._solver.add_cost(cs.sumsqr(error_expr))
-                    else:
-                        self._solver.add_cost(expression)
+                    self.add_cost(expression, **kwargs)
                 case _:
                     pass
 
diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py
index c203c5f9..1b0a22c9 100644
--- a/test/test_optimization_problem.py
+++ b/test/test_optimization_problem.py
@@ -144,23 +144,17 @@ def test_opti_solver_with_parameters_and_lists():
         c.append(20.0 * np.random.rand(3) - 10.0)
         initial_guess[j].parameter = c[j]
 
-    problem.add_expression(
-        mode=ExpressionType.minimize,
-        expression=(
-            a[j][k] * cs.power(var[j].composite.variable[k], 2)
-            + b[j][k] * var[j].composite.variable[k]
-            for j in range(len(initial_guess))
-            for k in range(0, 3)
-        ),
+    problem.add_cost(
+        a[j][k] * cs.power(var[j].composite.variable[k], 2)
+        + b[j][k] * var[j].composite.variable[k]
+        for j in
range(len(initial_guess))
+        for k in range(0, 3)
     )
 
-    problem.add_expression(
-        mode=ExpressionType.subject_to,
-        expression=(  # noqa
-            var[j].composite.variable[k] >= c[j][k]
-            for j in range(len(initial_guess))
-            for k in range(3)
-        ),
+    problem.add_constraint(
+        var[j].composite.variable[k] >= c[j][k]  # noqa
+        for j in range(len(initial_guess))
+        for k in range(3)
     )
 
     problem.solver().set_initial_guess(initial_guess=initial_guess)
@@ -221,7 +215,9 @@ def test_switch_costs():
         ExpressionType.subject_to, new_variables.x + new_variables.y == a - 1
     )  # noqa
     new_problem.add_expression(
-        ExpressionType.subject_to, new_variables.x * new_variables.x
+        ExpressionType.subject_to,
+        new_variables.x * new_variables.x + 1,
+        expected_value=1,
     )
     output, cost_value = new_problem.solver().solve()
     expected_cost = a * (a - 1) ** 2
@@ -250,7 +246,9 @@ def test_switch_constraints():
     new_problem.add_expression(
         ExpressionType.subject_to, new_variables.x + new_variables.y == a - 1
     )  # noqa
-    new_problem.add_expression(ExpressionType.minimize, new_variables.x == 5)
+    new_problem.add_expression(
+        ExpressionType.minimize, new_variables.x == 5, scaling=1.0
+    )
     output, cost_value = new_problem.solver().solve()
     assert cost_value == pytest.approx(expected=initial_cost_value, rel=0.1)
     assert output.x == pytest.approx(initial_output.x)

From fd41fc51a05793587d55dd4e0fb96f85d751c50f Mon Sep 17 00:00:00 2001
From: Stefano Dafarra
Date: Fri, 31 Mar 2023 11:57:18 +0200
Subject: [PATCH 17/26] Apply suggestions from code review

Co-authored-by: Diego Ferigo
---
 src/hippopt/base/opti_solver.py         | 2 +-
 src/hippopt/base/optimization_solver.py | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py
index ca985c7d..c63cda2f 100644
--- a/src/hippopt/base/opti_solver.py
+++ b/src/hippopt/base/opti_solver.py
@@ -173,7 +173,7 @@ def _set_initial_guess_internal(
         self,
         initial_guess: TOptimizationObject,
         corresponding_variable: TOptimizationObject,
-    ):
+    ) -> None:
         for field in dataclasses.fields(initial_guess):
             has_storage_field = OptimizationObject.StorageTypeField in field.metadata
 
diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py
index 4dfaa518..c1fd63d9 100644
--- a/src/hippopt/base/optimization_solver.py
+++ b/src/hippopt/base/optimization_solver.py
@@ -20,7 +20,7 @@ def generate_optimization_objects(
     @abc.abstractmethod
     def set_initial_guess(
         self, initial_guess: TOptimizationObject | List[TOptimizationObject]
-    ):
+    ) -> None:
         pass
 
     @abc.abstractmethod
@@ -36,11 +36,11 @@ def get_cost_value(self) -> float | None:
         pass
 
     @abc.abstractmethod
-    def add_cost(self, input_cost: cs.MX):
+    def add_cost(self, input_cost: cs.MX) -> None:
         pass
 
     @abc.abstractmethod
-    def add_constraint(self, input_constraint: cs.MX):
+    def add_constraint(self, input_constraint: cs.MX) -> None:
         pass
 
     @abc.abstractmethod

From a93f2bb0eebfea1e33c89ecc58d2d66d39291437 Mon Sep 17 00:00:00 2001
From: Stefano
Date: Fri, 31 Mar 2023 12:14:28 +0200
Subject: [PATCH 18/26] Avoid using __dict__

---
 src/hippopt/base/opti_solver.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py
index c63cda2f..6b3bd264 100644
--- a/src/hippopt/base/opti_solver.py
+++ b/src/hippopt/base/opti_solver.py
@@ -50,7 +50,7 @@ def _generate_objects_from_instance(
             and field.metadata[OptimizationObject.StorageTypeField]
             == ContinuousVariable.StorageType
         ):
-            value =
output.__dict__[field.name]
+            value = dataclasses.asdict(output)[field.name]
 
             if isinstance(value, np.ndarray):
                 if value.ndim > 2:
                     raise ValueError(
@@ -70,7 +70,7 @@ def _generate_objects_from_instance(
             and field.metadata[OptimizationObject.StorageTypeField]
             == Parameter.StorageType
         ):
-            value = output.__dict__[field.name]
+            value = dataclasses.asdict(output)[field.name]
 
             if isinstance(value, np.ndarray):
                 if value.ndim > 2:
@@ -148,7 +148,7 @@ def _generate_solution_output(
                     == Parameter.StorageType
                 )
             ):
-                var = variables.__dict__[field.name]
+                var = dataclasses.asdict(variables)[field.name]
                 output.__setattr__(field.name, self._opti_solution.value(var))
                 continue
 
@@ -182,7 +182,7 @@ def _set_initial_guess_internal(
             and field.metadata[OptimizationObject.StorageTypeField]
             == ContinuousVariable.StorageType
         ):
-            guess = initial_guess.__dict__[field.name]
+            guess = dataclasses.asdict(initial_guess)[field.name]
 
             if guess is None:
                 continue
@@ -224,7 +224,7 @@ def _set_initial_guess_internal(
             and field.metadata[OptimizationObject.StorageTypeField]
             == Parameter.StorageType
         ):
-            guess = initial_guess.__dict__[field.name]
+            guess = dataclasses.asdict(initial_guess)[field.name]
 
             if guess is None:
                 continue

From 06e01fa5ca48237649830e5f18e4c12037064522 Mon Sep 17 00:00:00 2001
From: Stefano
Date: Fri, 31 Mar 2023 12:34:01 +0200
Subject: [PATCH 19/26] Set more output types. Avoid using None for the output solution and cost.

---
 src/hippopt/base/opti_solver.py          | 16 ++++++++++------
 src/hippopt/base/optimization_problem.py |  6 +++---
 src/hippopt/base/optimization_solver.py  |  8 ++------
 3 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py
index 6b3bd264..d26857fe 100644
--- a/src/hippopt/base/opti_solver.py
+++ b/src/hippopt/base/opti_solver.py
@@ -332,7 +332,7 @@ def get_optimization_objects(
     def set_initial_guess(
         self, initial_guess: TOptimizationObject | List[TOptimizationObject]
-    ):
+    ) -> None:
         if isinstance(initial_guess, list):
             if not isinstance(self._variables, list):
                 raise ValueError(
@@ -361,7 +361,7 @@ def set_opti_options(
         inner_solver: str = None,
         options_plugin: dict[str, Any] = None,
         options_solver: dict[str, Any] = None,
-    ):
+    ) -> None:
         if inner_solver is not None:
             self._inner_solver = inner_solver
         if options_plugin is not None:
@@ -380,20 +380,24 @@ def solve(self) -> Tuple[TOptimizationObject, float]:
         self._solver.minimize(self._cost)
         self._opti_solution = self._solver.solve()
         self._output_cost = self._opti_solution.value(self._cost)
         self._output_solution = self._generate_solution_output(self._variables)
         return self._output_solution, self._output_cost
 
-    def get_solution(self) -> TOptimizationObject | List[TOptimizationObject] | None:
+    def get_solution(self) -> TOptimizationObject | List[TOptimizationObject]:
+        if self._output_solution is None:
+            raise ValueError("There is no valid output yet")
         return self._output_solution
 
-    def get_cost_value(self) -> float | None:
+    def get_cost_value(self) -> float:
+        if self._output_cost is None:
+            raise ValueError("There is no valid output yet")
         return self._output_cost
 
-    def add_cost(self, input_cost: cs.MX):
+    def add_cost(self, input_cost: cs.MX) -> None:
         if self._cost is None:
             self._cost = input_cost
             return
 
         self._cost += input_cost
 
-    def add_constraint(self, input_constraint: cs.MX):
+    def add_constraint(self, input_constraint: cs.MX) -> None:
         self._solver.subject_to(input_constraint)
 
     def cost_function(self) -> cs.MX:
diff --git a/src/hippopt/base/optimization_problem.py b/src/hippopt/base/optimization_problem.py
index 45203c4f..d07cd548 100644
--- a/src/hippopt/base/optimization_problem.py
+++
b/src/hippopt/base/optimization_problem.py
@@ -42,7 +42,7 @@ def add_cost(
         self,
         expression: cs.MX | Generator[cs.MX, None, None],
         scaling: float | cs.MX = 1.0,
-    ):
+    ) -> None:
         if isinstance(expression, types.GeneratorType):
             for expr in expression:
                 self.add_cost(expr, scaling)
@@ -62,7 +62,7 @@ def add_constraint(
         self,
         expression: cs.MX | Generator[cs.MX, None, None],
         expected_value: float | cs.MX = 0.0,
-    ):
+    ) -> None:
         if isinstance(expression, types.GeneratorType):
             for expr in expression:
                 self.add_constraint(expr, expected_value)
@@ -84,7 +84,7 @@ def add_expression(
         mode: ExpressionType,
         expression: cs.MX | Generator[cs.MX, None, None],
         **kwargs,
-    ):
+    ) -> None:
         if isinstance(expression, types.GeneratorType):
             for expr in expression:
                 self.add_expression(mode, expr, **kwargs)
diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py
index c1fd63d9..5a861376 100644
--- a/src/hippopt/base/optimization_solver.py
+++ b/src/hippopt/base/optimization_solver.py
@@ -28,11 +28,11 @@ def solve(self) -> Tuple[TOptimizationObject, float]:
         pass
 
     @abc.abstractmethod
-    def get_solution(self) -> TOptimizationObject | List[TOptimizationObject] | None:
+    def get_solution(self) -> TOptimizationObject | List[TOptimizationObject]:
         pass
 
     @abc.abstractmethod
-    def get_cost_value(self) -> float | None:
+    def get_cost_value(self) -> float:
         pass
 
     @abc.abstractmethod
@@ -42,7 +42,3 @@ def add_cost(self, input_cost: cs.MX) -> None:
         pass
 
     @abc.abstractmethod
     def add_constraint(self, input_constraint: cs.MX) -> None:
         pass
-
-    @abc.abstractmethod
-    def cost_function(self) -> cs.MX:
-        pass

From 26ccb77a403f81163c9bd2032d20f62b388f8b28 Mon Sep 17 00:00:00 2001
From: Stefano
Date: Fri, 31 Mar 2023 12:35:17 +0200
Subject: [PATCH 20/26] Avoid pytest warnings

---
 test/test_base.py                 |  8 ++++----
 test/test_optimization_problem.py | 14 +++++++-------
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/test/test_base.py b/test/test_base.py
index af59dd4f..1759555f 100644
--- a/test/test_base.py
+++ b/test/test_base.py
@@ -15,7 +15,7 @@
 
 
 @dataclasses.dataclass
-class TestVariable(OptimizationObject):
+class MyTestVariable(OptimizationObject):
     storage: StorageType = default_storage_field(cls=ContinuousVariable)
 
     def __post_init__(self):
@@ -23,7 +23,7 @@
 
 
 @dataclasses.dataclass
-class TestParameter(OptimizationObject):
+class MyTestParameter(OptimizationObject):
     storage: StorageType = default_storage_field(cls=Parameter)
 
     def __post_init__(self):
@@ -31,14 +31,14 @@
 
 
 def test_zero_variable():
-    test_var = TestVariable()
+    test_var = MyTestVariable()
     test_var_zero = test_var.get_default_initialized_object()
     assert test_var_zero.storage.shape == (3,)
     assert np.all(test_var_zero.storage == 0)
 
 
 def test_zero_parameter():
-    test_par = TestParameter()
+    test_par = MyTestParameter()
     test_par_zero = test_par.get_default_initialized_object()
     assert test_par_zero.storage.shape == (3,)
     assert np.all(test_par_zero.storage == 0)
diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py
index 1b0a22c9..d17f3a6e 100644
--- a/test/test_optimization_problem.py
+++ b/test/test_optimization_problem.py
@@ -16,7 +16,7 @@
 
 
 @dataclasses.dataclass
-class TestVar(OptimizationObject):
+class MyTestVar(OptimizationObject):
     variable: StorageType = default_storage_field(ContinuousVariable)
 
     def __post_init__(self):
@@ -25,7 +25,7 @@
 
 def test_opti_solver():
     problem = OptimizationProblem()
-    var =
problem.generate_optimization_objects(input_structure=TestVar()) + var = problem.generate_optimization_objects(input_structure=MyTestVar()) np.random.seed(123) a = 10.0 * np.random.rand(3) + 0.01 b = 20.0 * np.random.rand(3) - 10.0 @@ -65,8 +65,8 @@ def test_opti_solver(): @dataclasses.dataclass -class TestVarAndPar(OptimizationObject): - composite: TestVar = dataclasses.field(default_factory=TestVar) +class MyTestVarAndPar(OptimizationObject): + composite: MyTestVar = dataclasses.field(default_factory=MyTestVar) parameter: StorageType = default_storage_field(Parameter) def __post_init__(self): @@ -75,8 +75,8 @@ def __post_init__(self): def test_opti_solver_with_parameters(): problem = OptimizationProblem() - initial_guess = TestVarAndPar() - var = problem.generate_optimization_objects(input_structure=TestVarAndPar()) + initial_guess = MyTestVarAndPar() + var = problem.generate_optimization_objects(input_structure=MyTestVarAndPar()) np.random.seed(123) a = 10.0 * np.random.rand(3) + 0.01 b = 20.0 * np.random.rand(3) - 10.0 @@ -129,7 +129,7 @@ def test_opti_solver_with_parameters_and_lists(): problem = OptimizationProblem() initial_guess = [] for _ in range(3): - initial_guess.append(TestVarAndPar()) + initial_guess.append(MyTestVarAndPar()) var = problem.generate_optimization_objects(input_structure=initial_guess) np.random.seed(123) From dfdb8116f854be4df6338f8f7e925cfb42ed207c Mon Sep 17 00:00:00 2001 From: Stefano Date: Fri, 31 Mar 2023 12:40:00 +0200 Subject: [PATCH 21/26] Added custom exception in case the problem has not been solved yet. --- src/hippopt/base/opti_solver.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index d26857fe..209adf77 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -11,6 +11,11 @@ from hippopt.base.parameter import Parameter +class ProblemNotSolvedException(Exception): + def __init__(self): + super().__init__("The problem has not been solved yet.") + + @dataclasses.dataclass class OptiSolver(OptimizationSolver): DefaultSolverType: ClassVar[str] = "ipopt" @@ -382,12 +387,12 @@ def solve(self) -> Tuple[TOptimizationObject, float]: def get_solution(self) -> TOptimizationObject | List[TOptimizationObject]: if self._output_solution is None: - raise ValueError("There is no valid output yet") + raise ProblemNotSolvedException return self._output_solution def get_cost_value(self) -> float: if self._output_cost is None: - raise ValueError("There is no valid output yet") + raise ProblemNotSolvedException return self._output_cost def add_cost(self, input_cost: cs.MX) -> None: From 875bd1a2f3a41dad61bb56372093804de7ea7fc1 Mon Sep 17 00:00:00 2001 From: Stefano Date: Thu, 6 Apr 2023 13:02:13 +0200 Subject: [PATCH 22/26] Using is instead of == when checking the storage type --- src/hippopt/base/opti_solver.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 209adf77..014e3aed 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -53,7 +53,7 @@ def _generate_objects_from_instance( if ( has_storage_field and field.metadata[OptimizationObject.StorageTypeField] - == ContinuousVariable.StorageType + is ContinuousVariable.StorageType ): value = dataclasses.asdict(output)[field.name] @@ -73,7 +73,7 @@ def _generate_objects_from_instance( if ( has_storage_field and field.metadata[OptimizationObject.StorageTypeField] - 
== Parameter.StorageType + is Parameter.StorageType ): value = dataclasses.asdict(output)[field.name] @@ -146,11 +146,11 @@ def _generate_solution_output( if has_storage_field and ( ( field.metadata[OptimizationObject.StorageTypeField] - == ContinuousVariable.StorageType + is ContinuousVariable.StorageType ) or ( field.metadata[OptimizationObject.StorageTypeField] - == Parameter.StorageType + is Parameter.StorageType ) ): var = dataclasses.asdict(variables)[field.name] @@ -185,7 +185,7 @@ def _set_initial_guess_internal( if ( has_storage_field and field.metadata[OptimizationObject.StorageTypeField] - == ContinuousVariable.StorageType + is ContinuousVariable.StorageType ): guess = dataclasses.asdict(initial_guess)[field.name] @@ -227,7 +227,7 @@ def _set_initial_guess_internal( if ( has_storage_field and field.metadata[OptimizationObject.StorageTypeField] - == Parameter.StorageType + is Parameter.StorageType ): guess = dataclasses.asdict(initial_guess)[field.name] From 34ba2dd1f1c28c03de676d35c630f65cf3a1dc65 Mon Sep 17 00:00:00 2001 From: Stefano Date: Thu, 6 Apr 2023 13:59:08 +0200 Subject: [PATCH 23/26] Using InitVar in optimization problem test --- test/test_optimization_problem.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py index d17f3a6e..3e8d777c 100644 --- a/test/test_optimization_problem.py +++ b/test/test_optimization_problem.py @@ -18,37 +18,39 @@ @dataclasses.dataclass class MyTestVar(OptimizationObject): variable: StorageType = default_storage_field(ContinuousVariable) + size: dataclasses.InitVar[int] = dataclasses.field(default=3) - def __post_init__(self): - self.variable = np.zeros(3) + def __post_init__(self, size: int = 3): + self.variable = np.zeros(size) def test_opti_solver(): + size = 4 problem = OptimizationProblem() - var = problem.generate_optimization_objects(input_structure=MyTestVar()) + var = problem.generate_optimization_objects(input_structure=MyTestVar(size=size)) np.random.seed(123) - a = 10.0 * np.random.rand(3) + 0.01 - b = 20.0 * np.random.rand(3) - 10.0 - c = 20.0 * np.random.rand(3) - 10.0 + a = 10.0 * np.random.rand(size) + 0.01 + b = 20.0 * np.random.rand(size) - 10.0 + c = 20.0 * np.random.rand(size) - 10.0 problem.add_expression( mode=ExpressionType.minimize, expression=( a[k] * cs.power(var.variable[k], 2) + b[k] * var.variable[k] - for k in range(0, 3) + for k in range(size) ), ) problem.add_expression( mode=ExpressionType.subject_to, - expression=(var.variable[k] >= c[k] for k in range(3)), # noqa + expression=(var.variable[k] >= c[k] for k in range(size)), # noqa ) output, cost_value = problem.solver().solve() - expected_x = np.zeros(3) + expected_x = np.zeros(size) expected_cost = 0 - for i in range(3): + for i in range(size): expected = -b[i] / (2 * a[i]) expected_x[i] = expected if expected >= c[i] else c[i] expected_cost += ( From 57e4a17955809a4570be0090d9e38fe8b301f3bf Mon Sep 17 00:00:00 2001 From: Stefano Date: Thu, 6 Apr 2023 14:25:47 +0200 Subject: [PATCH 24/26] Added possibility to set solver options and plugins at construction time. 
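
For example (hypothetical option values; the two dictionaries are forwarded
verbatim to cs.Opti.solver(inner_solver, options_plugin, options_solver)):

    from hippopt import OptimizationProblem, OptiSolver

    # "print_level" is an option of the inner solver (ipopt by default),
    # "expand" an option of the casadi Opti plugin; both default to {}.
    solver = OptiSolver(
        options_solver={"print_level": 0},
        options_plugin={"expand": True},
    )
    problem = OptimizationProblem(optimization_solver=solver)
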
--- src/hippopt/base/opti_solver.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index 014e3aed..d52632a9 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -24,6 +24,12 @@ class OptiSolver(OptimizationSolver): _options_plugin: dict[str, Any] = dataclasses.field(default_factory=dict) _options_solver: dict[str, Any] = dataclasses.field(default_factory=dict) + options_solver: dataclasses.InitVar[dict[str, Any]] = dataclasses.field( + default=None + ) + options_plugin: dataclasses.InitVar[dict[str, Any]] = dataclasses.field( + default=None + ) _cost: cs.MX = dataclasses.field(default=None) _solver: cs.Opti = dataclasses.field(default=None) @@ -36,8 +42,19 @@ class OptiSolver(OptimizationSolver): default=None ) - def __post_init__(self, problem_type: str) -> None: + def __post_init__( + self, + problem_type: str, + options_solver: dict[str, Any] = None, + options_plugin: dict[str, Any] = None, + ): self._solver = cs.Opti(problem_type) + self._options_solver = ( + options_solver if isinstance(options_solver, dict) else {} + ) + self._options_plugin = ( + options_plugin if isinstance(options_plugin, dict) else {} + ) self._solver.solver( self._inner_solver, self._options_plugin, self._options_solver ) @@ -364,8 +381,8 @@ def set_initial_guess( def set_opti_options( self, inner_solver: str = None, - options_plugin: dict[str, Any] = None, options_solver: dict[str, Any] = None, + options_plugin: dict[str, Any] = None, ) -> None: if inner_solver is not None: self._inner_solver = inner_solver From e5614fea6eba1c57ecb39c39f7b7aeb055204da4 Mon Sep 17 00:00:00 2001 From: Stefano Date: Thu, 6 Apr 2023 17:06:00 +0200 Subject: [PATCH 25/26] Using an object for the solver output. 
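
Call sites migrate as sketched here (fragment mirroring the test updates
below; `problem`, `expected_x` and `expected_cost` come from that context):

    # before: output, cost_value = problem.solver().solve()
    output = problem.solver().solve()
    assert output.values.variable == pytest.approx(expected_x)  # solved values
    assert output.cost_value == pytest.approx(expected_cost)  # scalar cost
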
--- src/hippopt/base/opti_solver.py | 8 +++-- src/hippopt/base/optimization_solver.py | 20 +++++++++-- test/test_optimization_problem.py | 44 +++++++++++++------------ 3 files changed, 46 insertions(+), 26 deletions(-) diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py index d52632a9..ae0f1f39 100644 --- a/src/hippopt/base/opti_solver.py +++ b/src/hippopt/base/opti_solver.py @@ -7,7 +7,7 @@ from hippopt.base.continuous_variable import ContinuousVariable from hippopt.base.optimization_object import OptimizationObject, TOptimizationObject -from hippopt.base.optimization_solver import OptimizationSolver +from hippopt.base.optimization_solver import OptimizationSolver, SolverOutput from hippopt.base.parameter import Parameter @@ -395,12 +395,14 @@ def set_opti_options( self._inner_solver, self._options_plugin, self._options_solver ) - def solve(self) -> Tuple[TOptimizationObject, float]: + def solve(self) -> SolverOutput: self._solver.minimize(self._cost) self._opti_solution = self._solver.solve() self._output_cost = self._opti_solution.value(self._cost) self._output_solution = self._generate_solution_output(self._variables) - return self._output_solution, self._output_cost + return SolverOutput( + _values=self._output_solution, _cost_value=self._output_cost + ) def get_solution(self) -> TOptimizationObject | List[TOptimizationObject]: if self._output_solution is None: diff --git a/src/hippopt/base/optimization_solver.py b/src/hippopt/base/optimization_solver.py index 5a861376..9d8e00e4 100644 --- a/src/hippopt/base/optimization_solver.py +++ b/src/hippopt/base/optimization_solver.py @@ -1,12 +1,28 @@ import abc import dataclasses -from typing import List, Tuple, TypeVar +from typing import Generic, List, Tuple, TypeVar import casadi as cs from hippopt.base.optimization_object import TOptimizationObject TOptimizationSolver = TypeVar("TOptimizationSolver", bound="OptimizationSolver") +TGenericOptimizationObject = TypeVar("TGenericOptimizationObject") + + +@dataclasses.dataclass +class SolverOutput(Generic[TGenericOptimizationObject]): + values: TGenericOptimizationObject = dataclasses.field(default=None) + cost_value: float = None + + _values: dataclasses.InitVar[TGenericOptimizationObject] = dataclasses.field( + default=None + ) + _cost_value: dataclasses.InitVar[float] = dataclasses.field(default=None) + + def __post_init__(self, _values: TGenericOptimizationObject, _cost_value: float): + self.values = _values + self.cost_value = _cost_value @dataclasses.dataclass @@ -24,7 +40,7 @@ def set_initial_guess( pass @abc.abstractmethod - def solve(self) -> Tuple[TOptimizationObject, float]: + def solve(self) -> SolverOutput: pass @abc.abstractmethod diff --git a/test/test_optimization_problem.py b/test/test_optimization_problem.py index 3e8d777c..06e218da 100644 --- a/test/test_optimization_problem.py +++ b/test/test_optimization_problem.py @@ -46,7 +46,7 @@ def test_opti_solver(): expression=(var.variable[k] >= c[k] for k in range(size)), # noqa ) - output, cost_value = problem.solver().solve() + output = problem.solver().solve() expected_x = np.zeros(size) expected_cost = 0 @@ -59,8 +59,8 @@ def test_opti_solver(): else a[i] * (c[i] ** 2) + b[i] * c[i] ) - assert output.variable == pytest.approx(expected_x) - assert cost_value == pytest.approx(expected_cost) + assert output.values.variable == pytest.approx(expected_x) + assert output.cost_value == pytest.approx(expected_cost) assert problem.solver().get_solution().variable == pytest.approx(expected_x) assert 
problem.solver().get_cost_value() == pytest.approx(expected_cost) @@ -104,7 +104,7 @@ def test_opti_solver_with_parameters(): problem.solver().set_initial_guess(initial_guess=initial_guess) - output, cost_value = problem.solver().solve() + output = problem.solver().solve() expected_x = np.zeros(3) expected_cost = 0 @@ -117,9 +117,9 @@ def test_opti_solver_with_parameters(): else a[i] * (c[i] ** 2) + b[i] * c[i] ) - assert output.composite.variable == pytest.approx(expected_x) - assert cost_value == pytest.approx(expected_cost) - assert output.parameter == pytest.approx(c) + assert output.values.composite.variable == pytest.approx(expected_x) + assert output.cost_value == pytest.approx(expected_cost) + assert output.values.parameter == pytest.approx(c) assert problem.solver().get_solution().composite.variable == pytest.approx( expected_x @@ -161,7 +161,7 @@ def test_opti_solver_with_parameters_and_lists(): problem.solver().set_initial_guess(initial_guess=initial_guess) - output, cost_value = problem.solver().solve() + output = problem.solver().solve() expected_x = np.zeros(3) expected_cost = 0 @@ -175,10 +175,10 @@ def test_opti_solver_with_parameters_and_lists(): else a[i][j] * (c[i][j] ** 2) + b[i][j] * c[i][j] ) - assert output[i].composite.variable == pytest.approx(expected_x) - assert output[i].parameter == pytest.approx(c[i]) + assert output.values[i].composite.variable == pytest.approx(expected_x) + assert output.values[i].parameter == pytest.approx(c[i]) - assert cost_value == pytest.approx(expected_cost) + assert output.cost_value == pytest.approx(expected_cost) assert problem.solver().get_cost_value() == pytest.approx(expected_cost) @@ -203,10 +203,10 @@ def test_switch_costs(): initial_problem.add_expression( ExpressionType.subject_to, variables.x + variables.y == a - 1 ) # noqa - output, cost_value = initial_problem.solver().solve() + output = initial_problem.solver().solve() expected_cost = a + (a - 2) ** 2 - assert cost_value == pytest.approx(expected=expected_cost, rel=0.1) - assert output.x == pytest.approx(a - 2, rel=0.1) + assert output.cost_value == pytest.approx(expected=expected_cost, rel=0.1) + assert output.values.x == pytest.approx(a - 2, rel=0.1) new_problem = OptimizationProblem() new_variables = new_problem.generate_optimization_objects(SwitchVar()) @@ -221,10 +221,10 @@ def test_switch_costs(): new_variables.x * new_variables.x + 1, expected_value=1, ) - output, cost_value = new_problem.solver().solve() + output = new_problem.solver().solve() expected_cost = a * (a - 1) ** 2 - assert cost_value == pytest.approx(expected=expected_cost, rel=0.1) - assert output.x == pytest.approx(0, abs=1e-4) + assert output.cost_value == pytest.approx(expected=expected_cost, rel=0.1) + assert output.values.x == pytest.approx(0, abs=1e-4) def test_switch_constraints(): @@ -238,7 +238,7 @@ def test_switch_constraints(): initial_problem.add_expression( ExpressionType.subject_to, variables.x + variables.y == a - 1 ) # noqa - initial_output, initial_cost_value = initial_problem.solver().solve() + initial_output = initial_problem.solver().solve() new_problem = OptimizationProblem() new_variables = new_problem.generate_optimization_objects(SwitchVar()) @@ -251,6 +251,8 @@ def test_switch_constraints(): new_problem.add_expression( ExpressionType.minimize, new_variables.x == 5, scaling=1.0 ) - output, cost_value = new_problem.solver().solve() - assert cost_value == pytest.approx(expected=initial_cost_value, rel=0.1) - assert output.x == pytest.approx(initial_output.x) + output = 
new_problem.solver().solve()
+    assert output.cost_value == pytest.approx(
+        expected=initial_output.cost_value, rel=0.1
+    )
+    assert output.values.x == pytest.approx(initial_output.values.x)

From 3310a860b02c8f25a80e42812ef6238ba1bfdd0f Mon Sep 17 00:00:00 2001
From: Stefano
Date: Thu, 6 Apr 2023 17:18:05 +0200
Subject: [PATCH 26/26] Forced casting of the opti output to np.array.

---
 src/hippopt/base/opti_solver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/hippopt/base/opti_solver.py b/src/hippopt/base/opti_solver.py
index ae0f1f39..388cccaf 100644
--- a/src/hippopt/base/opti_solver.py
+++ b/src/hippopt/base/opti_solver.py
@@ -171,7 +171,7 @@
                 )
             ):
                 var = dataclasses.asdict(variables)[field.name]
-                output.__setattr__(field.name, self._opti_solution.value(var))
+                output.__setattr__(field.name, np.array(self._opti_solution.value(var)))
                 continue
 
             composite_variable = variables.__getattribute__(field.name)
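
A note on the motivation (our reading; the commit message does not spell it
out): casadi's OptiSol.value returns a plain Python float for scalar entries
and an array for matrix-valued ones, so without the cast the type of a solved
field would depend on its shape. A minimal sketch of the asymmetry:

    import casadi as cs
    import numpy as np

    opti = cs.Opti()
    x = opti.variable(1)  # scalar decision variable
    y = opti.variable(3)  # vector decision variable
    opti.minimize(cs.sumsqr(x - 1) + cs.sumsqr(y))
    opti.solver("ipopt")
    sol = opti.solve()

    print(type(sol.value(x)))            # float
    print(type(sol.value(y)))            # numpy.ndarray
    print(np.array(sol.value(x)).shape)  # uniform numpy handling after cast
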